Posted to commits@dlab.apache.org by my...@apache.org on 2020/09/24 12:40:48 UTC

[incubator-dlab] branch DLAB-2067 created (now a30e375)

This is an automated email from the ASF dual-hosted git repository.

mykolabodnar pushed a change to branch DLAB-2067
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git.


      at a30e375  [DLAB-2067]: Replaced old name by new one in all sources in code - dlab --> datalab in placeholders - DLAB --> DATA_LAB in filenames - dlab --> datalab in comments/readme/etc - DLab --> Data Lab

This branch includes the following new commits:

     new a30e375  [DLAB-2067]: Replaced old name by new one in all sources in code - dlab --> datalab in placeholders - DLAB --> DATA_LAB in filenames - dlab --> datalab in comments/readme/etc - DLab --> Data Lab

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@dlab.apache.org
For additional commands, e-mail: commits-help@dlab.apache.org


[incubator-dlab] 01/01: [DLAB-2067]: Replaced old name by new one in all sources in code - dlab --> datalab in placeholders - DLAB --> DATA_LAB in filenames - dlab --> datalab in comments/readme/etc - DLab --> Data Lab

Posted by my...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mykolabodnar pushed a commit to branch DLAB-2067
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git

commit a30e3753a8747b0b4229d13b03e89634cec316e6
Author: bodnarmykola <bo...@gmail.com>
AuthorDate: Thu Sep 24 15:35:57 2020 +0300

    [DLAB-2067]: Replaced old name by new one in all sources
    in code - dlab --> datalab
    in placeholders - DLAB --> DATA_LAB
    in filenames - dlab --> datalab
    in comments/readme/etc - DLab --> Data Lab
---
 CONTRIBUTING.md                                    |   6 +-
 README.md                                          | 380 ++++++++++-----------
 RELEASE_NOTES.md                                   |  16 +-
 USER_GUIDE.md                                      |  58 ++--
 doc/{dlab_aws.png => datalab_aws.png}              | Bin
 doc/{dlab_azure.png => datalab_azure.png}          | Bin
 doc/{dlab_gcp.png => datalab_gcp.png}              | Bin
 .../scripts/POST_DEPLOYMENT.md                     |  10 +-
 .../scripts/{deploy_dlab.py => deploy_datalab.py}  |  48 +--
 .../scripts/deploy_repository/deploy_repository.py |  24 +-
 .../scripts/post-deployment_configuration.py       | 110 +++---
 .../src/base/scripts/install_prerequisites.py      |   4 +-
 .../src/base/scripts/install_user_key.py           |   2 +-
 .../src/dataengine-service/fabfile.py              |   6 +-
 .../src/dataengine/fabfile.py                      |   6 +-
 .../src/dataengine/scripts/configure_dataengine.py |   6 +-
 .../src/deeplearning/fabfile.py                    |   6 +-
 .../scripts/configure_deep_learning_node.py        |   8 +-
 infrastructure-provisioning/src/edge/fabfile.py    |   2 +-
 .../src/edge/scripts/configure_http_proxy.py       |   2 +-
 .../edge/scripts/configure_nginx_reverse_proxy.py  |   2 +-
 .../src/edge/scripts/reupload_ssh_key.py           |   6 +-
 .../src/edge/templates/squid.conf                  |   4 +-
 .../src/general/api/check_inactivity.py            |   2 +-
 .../src/general/api/configure.py                   |   2 +-
 .../src/general/api/create.py                      |   2 +-
 .../src/general/api/create_image.py                |   2 +-
 .../src/general/api/git_creds.py                   |   2 +-
 .../src/general/api/install_libs.py                |   2 +-
 .../src/general/api/list_libs.py                   |   4 +-
 .../src/general/api/reconfigure_spark.py           |   2 +-
 .../src/general/api/recreate.py                    |   2 +-
 .../src/general/api/reupload_key.py                |   2 +-
 .../src/general/api/start.py                       |   2 +-
 .../src/general/api/status.py                      |   2 +-
 .../src/general/api/stop.py                        |   2 +-
 .../src/general/api/terminate.py                   |   2 +-
 .../src/general/api/terminate_image.py             |   2 +-
 .../src/general/conf/{dlab.ini => datalab.ini}     |  42 +--
 .../src/general/files/aws/base_Dockerfile          |   8 +-
 .../src/general/files/aws/base_description.json    |   2 +-
 .../files/aws/dataengine-service_Dockerfile        |   6 +-
 .../src/general/files/aws/dataengine_Dockerfile    |   4 +-
 .../src/general/files/aws/deeplearning_Dockerfile  |   4 +-
 .../src/general/files/aws/edge_Dockerfile          |   4 +-
 .../src/general/files/aws/jupyter_Dockerfile       |   4 +-
 .../src/general/files/aws/jupyterlab_Dockerfile    |   4 +-
 .../src/general/files/aws/project_Dockerfile       |   4 +-
 .../src/general/files/aws/rstudio_Dockerfile       |   4 +-
 .../src/general/files/aws/ssn_Dockerfile           |   4 +-
 .../src/general/files/aws/ssn_description.json     |   2 +-
 .../general/files/aws/tensor-rstudio_Dockerfile    |   4 +-
 .../src/general/files/aws/tensor_Dockerfile        |   4 +-
 .../src/general/files/aws/zeppelin_Dockerfile      |   4 +-
 .../src/general/files/azure/base_Dockerfile        |   8 +-
 .../src/general/files/azure/base_description.json  |   2 +-
 .../src/general/files/azure/dataengine_Dockerfile  |   4 +-
 .../general/files/azure/deeplearning_Dockerfile    |   4 +-
 .../src/general/files/azure/edge_Dockerfile        |   4 +-
 .../src/general/files/azure/jupyter_Dockerfile     |   4 +-
 .../src/general/files/azure/jupyterlab_Dockerfile  |   4 +-
 .../src/general/files/azure/project_Dockerfile     |   4 +-
 .../src/general/files/azure/rstudio_Dockerfile     |   4 +-
 .../src/general/files/azure/ssn_Dockerfile         |   4 +-
 .../src/general/files/azure/ssn_description.json   |   2 +-
 .../src/general/files/azure/tensor_Dockerfile      |   4 +-
 .../src/general/files/azure/zeppelin_Dockerfile    |   4 +-
 .../src/general/files/gcp/base_Dockerfile          |   8 +-
 .../src/general/files/gcp/base_description.json    |   2 +-
 .../files/gcp/dataengine-service_Dockerfile        |   4 +-
 .../src/general/files/gcp/dataengine_Dockerfile    |   4 +-
 .../src/general/files/gcp/deeplearning_Dockerfile  |   4 +-
 .../src/general/files/gcp/edge_Dockerfile          |   4 +-
 .../src/general/files/gcp/jupyter_Dockerfile       |   4 +-
 .../src/general/files/gcp/jupyterlab_Dockerfile    |   4 +-
 .../src/general/files/gcp/project_Dockerfile       |   4 +-
 .../src/general/files/gcp/rstudio_Dockerfile       |   4 +-
 .../src/general/files/gcp/ssn_Dockerfile           |   4 +-
 .../src/general/files/gcp/ssn_description.json     |   2 +-
 .../src/general/files/gcp/superset_Dockerfile      |   4 +-
 .../general/files/gcp/tensor-rstudio_Dockerfile    |   4 +-
 .../src/general/files/gcp/tensor_Dockerfile        |   4 +-
 .../src/general/files/gcp/zeppelin_Dockerfile      |   4 +-
 .../src/general/lib/aws/actions_lib.py             |  20 +-
 .../src/general/lib/aws/meta_lib.py                |   2 +-
 .../src/general/lib/azure/actions_lib.py           |  38 +--
 .../src/general/lib/gcp/actions_lib.py             |  20 +-
 .../src/general/lib/gcp/meta_lib.py                |   2 +-
 .../src/general/lib/os/debian/edge_lib.py          |  44 +--
 .../src/general/lib/os/debian/notebook_lib.py      |   8 +-
 .../src/general/lib/os/debian/ssn_lib.py           | 106 +++---
 .../src/general/lib/os/fab.py                      |  58 ++--
 .../src/general/lib/os/redhat/edge_lib.py          |  36 +-
 .../src/general/lib/os/redhat/notebook_lib.py      |   8 +-
 .../src/general/lib/os/redhat/ssn_lib.py           |  98 +++---
 .../src/general/scripts/aws/common_collect_data.py |   6 +-
 .../general/scripts/aws/common_create_bucket.py    |   4 +-
 .../general/scripts/aws/common_create_instance.py  |   4 +-
 .../scripts/aws/common_create_notebook_image.py    |  16 +-
 .../general/scripts/aws/common_create_policy.py    |   4 +-
 .../scripts/aws/common_create_role_policy.py       |   4 +-
 .../scripts/aws/common_create_security_group.py    |   4 +-
 .../general/scripts/aws/common_create_subnet.py    |  22 +-
 .../scripts/aws/common_download_git_certfile.py    |   4 +-
 ...common_notebook_configure_dataengine-service.py |  40 +--
 .../aws/common_notebook_configure_dataengine.py    |  34 +-
 .../general/scripts/aws/common_prepare_notebook.py |  30 +-
 .../general/scripts/aws/common_put_to_bucket.py    |   2 +-
 .../scripts/aws/common_remove_remote_kernels.py    |   4 +-
 .../src/general/scripts/aws/common_reupload_key.py |   6 +-
 .../general/scripts/aws/common_start_notebook.py   |  24 +-
 .../general/scripts/aws/common_stop_notebook.py    |  28 +-
 .../scripts/aws/common_terminate_notebook.py       |  22 +-
 .../scripts/aws/common_terminate_notebook_image.py |  14 +-
 .../scripts/aws/dataengine-service_configure.py    |  66 ++--
 .../scripts/aws/dataengine-service_create.py       |   4 +-
 .../scripts/aws/dataengine-service_install_libs.py |   6 +-
 .../scripts/aws/dataengine-service_list_libs.py    |   6 +-
 .../scripts/aws/dataengine-service_prepare.py      |  48 +--
 .../scripts/aws/dataengine-service_terminate.py    |  20 +-
 .../general/scripts/aws/dataengine_configure.py    |  98 +++---
 .../src/general/scripts/aws/dataengine_prepare.py  |  60 ++--
 .../src/general/scripts/aws/dataengine_start.py    |  20 +-
 .../src/general/scripts/aws/dataengine_stop.py     |  12 +-
 .../general/scripts/aws/dataengine_terminate.py    |  14 +-
 .../general/scripts/aws/deeplearning_configure.py  | 108 +++---
 .../scripts/aws/edge_associate_elastic_ip.py       |   6 +-
 .../src/general/scripts/aws/edge_configure.py      |  86 ++---
 .../scripts/aws/edge_configure_route_table.py      |   4 +-
 .../src/general/scripts/aws/edge_start.py          |  18 +-
 .../src/general/scripts/aws/edge_status.py         |  12 +-
 .../src/general/scripts/aws/edge_stop.py           |  14 +-
 .../src/general/scripts/aws/jupyter_configure.py   | 108 +++---
 .../jupyter_dataengine-service_create_configs.py   |   8 +-
 .../jupyter_install_dataengine-service_kernels.py  |  14 +-
 .../general/scripts/aws/jupyterlab_configure.py    | 118 +++----
 .../src/general/scripts/aws/project_prepare.py     | 166 ++++-----
 .../src/general/scripts/aws/project_terminate.py   |  52 +--
 .../src/general/scripts/aws/rstudio_configure.py   | 112 +++---
 .../rstudio_dataengine-service_create_configs.py   |   8 +-
 .../rstudio_install_dataengine-service_kernels.py  |  14 +-
 .../scripts/aws/ssn_associate_elastic_ip.py        |   6 +-
 .../src/general/scripts/aws/ssn_configure.py       | 114 +++----
 .../src/general/scripts/aws/ssn_create_endpoint.py |   6 +-
 .../src/general/scripts/aws/ssn_create_vpc.py      |   4 +-
 .../src/general/scripts/aws/ssn_finalize.py        |   2 +-
 .../src/general/scripts/aws/ssn_prepare.py         | 256 +++++++-------
 .../src/general/scripts/aws/ssn_terminate.py       |  18 +-
 .../scripts/aws/ssn_terminate_aws_resources.py     |  84 ++---
 .../scripts/aws/tensor-rstudio_configure.py        | 112 +++---
 .../src/general/scripts/aws/tensor_configure.py    | 106 +++---
 .../src/general/scripts/aws/zeppelin_configure.py  | 108 +++---
 .../zeppelin_dataengine-service_create_configs.py  |   8 +-
 .../zeppelin_install_dataengine-service_kernels.py |  14 +-
 .../general/scripts/azure/common_collect_data.py   |   6 +-
 .../azure/common_create_datalake_directory.py      |   6 +-
 .../scripts/azure/common_create_instance.py        |   8 +-
 .../scripts/azure/common_create_notebook_image.py  |  24 +-
 .../scripts/azure/common_create_security_group.py  |   4 +-
 .../scripts/azure/common_create_storage_account.py |   6 +-
 .../general/scripts/azure/common_create_subnet.py  |  10 +-
 .../scripts/azure/common_download_git_certfile.py  |   4 +-
 .../azure/common_notebook_configure_dataengine.py  |  26 +-
 .../scripts/azure/common_prepare_notebook.py       |  24 +-
 .../scripts/azure/common_remove_remote_kernels.py  |   4 +-
 .../general/scripts/azure/common_reupload_key.py   |   6 +-
 .../general/scripts/azure/common_start_notebook.py |  20 +-
 .../general/scripts/azure/common_stop_notebook.py  |  18 +-
 .../scripts/azure/common_terminate_notebook.py     |  18 +-
 .../azure/common_terminate_notebook_image.py       |  12 +-
 .../general/scripts/azure/dataengine_configure.py  |  80 ++---
 .../general/scripts/azure/dataengine_prepare.py    |  26 +-
 .../src/general/scripts/azure/dataengine_start.py  |  16 +-
 .../src/general/scripts/azure/dataengine_stop.py   |  16 +-
 .../general/scripts/azure/dataengine_terminate.py  |  18 +-
 .../scripts/azure/deeplearning_configure.py        |  72 ++--
 .../src/general/scripts/azure/edge_configure.py    |  46 +--
 .../src/general/scripts/azure/edge_prepare.py      |   8 +-
 .../src/general/scripts/azure/edge_start.py        |  14 +-
 .../src/general/scripts/azure/edge_status.py       |   8 +-
 .../src/general/scripts/azure/edge_stop.py         |  14 +-
 .../src/general/scripts/azure/edge_terminate.py    |  30 +-
 .../src/general/scripts/azure/jupyter_configure.py |  72 ++--
 .../general/scripts/azure/jupyterlab_configure.py  |  74 ++--
 .../src/general/scripts/azure/project_prepare.py   |  34 +-
 .../src/general/scripts/azure/project_terminate.py |  32 +-
 .../general/scripts/azure/rstudio_change_pass.py   |   2 +-
 .../src/general/scripts/azure/rstudio_configure.py |  76 ++---
 .../src/general/scripts/azure/ssn_configure.py     |  60 ++--
 .../general/scripts/azure/ssn_create_datalake.py   |   6 +-
 .../general/scripts/azure/ssn_create_peering.py    |   4 +-
 .../scripts/azure/ssn_create_resource_group.py     |   4 +-
 .../src/general/scripts/azure/ssn_create_vpc.py    |   4 +-
 .../src/general/scripts/azure/ssn_prepare.py       |  42 +--
 .../src/general/scripts/azure/ssn_terminate.py     |  38 +--
 .../src/general/scripts/azure/tensor_configure.py  |  70 ++--
 .../general/scripts/azure/zeppelin_configure.py    |  70 ++--
 .../src/general/scripts/gcp/common_collect_data.py |   6 +-
 .../general/scripts/gcp/common_create_bucket.py    |   4 +-
 .../general/scripts/gcp/common_create_firewall.py  |   4 +-
 .../general/scripts/gcp/common_create_instance.py  |   4 +-
 .../general/scripts/gcp/common_create_nat_route.py |   4 +-
 .../scripts/gcp/common_create_notebook_image.py    |  14 +-
 .../scripts/gcp/common_create_service_account.py   |   4 +-
 .../general/scripts/gcp/common_create_subnet.py    |  18 +-
 .../scripts/gcp/common_download_git_certfile.py    |   2 +-
 ...common_notebook_configure_dataengine-service.py |  18 +-
 .../gcp/common_notebook_configure_dataengine.py    |  26 +-
 .../general/scripts/gcp/common_prepare_notebook.py |  20 +-
 .../general/scripts/gcp/common_put_to_bucket.py    |   2 +-
 .../src/general/scripts/gcp/common_reupload_key.py |   6 +-
 .../general/scripts/gcp/common_start_notebook.py   |  18 +-
 .../general/scripts/gcp/common_stop_notebook.py    |  20 +-
 .../scripts/gcp/common_terminate_notebook.py       |  20 +-
 .../scripts/gcp/common_terminate_notebook_image.py |  14 +-
 .../scripts/gcp/dataengine-service_configure.py    |  38 +--
 .../scripts/gcp/dataengine-service_create.py       |   4 +-
 .../scripts/gcp/dataengine-service_install_libs.py |   6 +-
 .../scripts/gcp/dataengine-service_list_libs.py    |   6 +-
 .../scripts/gcp/dataengine-service_prepare.py      |  26 +-
 .../scripts/gcp/dataengine-service_terminate.py    |  16 +-
 .../general/scripts/gcp/dataengine_configure.py    |  76 ++---
 .../src/general/scripts/gcp/dataengine_prepare.py  |  24 +-
 .../src/general/scripts/gcp/dataengine_start.py    |  18 +-
 .../src/general/scripts/gcp/dataengine_stop.py     |  16 +-
 .../general/scripts/gcp/dataengine_terminate.py    |  20 +-
 .../general/scripts/gcp/deeplearning_configure.py  |  58 ++--
 .../src/general/scripts/gcp/edge_configure.py      |  46 +--
 .../general/scripts/gcp/edge_create_static_ip.py   |   6 +-
 .../src/general/scripts/gcp/edge_start.py          |  14 +-
 .../src/general/scripts/gcp/edge_status.py         |  10 +-
 .../src/general/scripts/gcp/edge_stop.py           |  14 +-
 .../src/general/scripts/gcp/edge_terminate.py      |  28 +-
 .../src/general/scripts/gcp/jupyter_configure.py   |  58 ++--
 .../jupyter_dataengine-service_create_configs.py   |   8 +-
 .../jupyter_install_dataengine-service_kernels.py  |  16 +-
 .../general/scripts/gcp/jupyterlab_configure.py    |  66 ++--
 .../src/general/scripts/gcp/project_prepare.py     |  34 +-
 .../src/general/scripts/gcp/project_terminate.py   |  28 +-
 .../src/general/scripts/gcp/rstudio_configure.py   |  64 ++--
 .../rstudio_dataengine-service_create_configs.py   |   8 +-
 .../rstudio_install_dataengine-service_kernels.py  |  14 +-
 .../src/general/scripts/gcp/ssn_configure.py       |  58 ++--
 .../general/scripts/gcp/ssn_create_static_ip.py    |   6 +-
 .../src/general/scripts/gcp/ssn_create_vpc.py      |   4 +-
 .../src/general/scripts/gcp/ssn_finalize.py        |   2 +-
 .../src/general/scripts/gcp/ssn_prepare.py         |  30 +-
 .../src/general/scripts/gcp/ssn_terminate.py       |  14 +-
 .../scripts/gcp/ssn_terminate_gcp_resources.py     |   6 +-
 .../src/general/scripts/gcp/superset_configure.py  |  70 ++--
 .../scripts/gcp/tensor-rstudio_configure.py        |  64 ++--
 .../src/general/scripts/gcp/tensor_configure.py    |  58 ++--
 .../src/general/scripts/gcp/zeppelin_configure.py  |  58 ++--
 .../zeppelin_dataengine-service_create_configs.py  |   8 +-
 .../zeppelin_install_dataengine-service_kernels.py |  14 +-
 .../src/general/scripts/os/check_inactivity.py     |   6 +-
 .../general/scripts/os/common_clean_instance.py    |   2 +-
 .../general/scripts/os/common_configure_proxy.py   |   2 +-
 .../scripts/os/common_configure_reverse_proxy.py   |   2 +-
 .../general/scripts/os/common_configure_spark.py   |  10 +-
 .../scripts/os/configure_proxy_for_docker.py       |   2 +-
 .../general/scripts/os/dataengine_install_libs.py  |   6 +-
 .../src/general/scripts/os/dataengine_list_libs.py |   6 +-
 .../scripts/os/dataengine_reconfigure_spark.py     |   6 +-
 .../os/deeplearning_dataengine_create_configs.py   |   8 +-
 .../os/deeplearning_install_dataengine_kernels.py  |  14 +-
 .../general/scripts/os/get_list_available_pkgs.py  |   4 +-
 .../general/scripts/os/install_additional_libs.py  |   4 +-
 .../os/jupyter_dataengine_create_configs.py        |   8 +-
 .../os/jupyter_install_dataengine_kernels.py       |  14 +-
 .../scripts/os/jupyterlab_container_start.py       |   4 +-
 .../src/general/scripts/os/manage_git_creds.py     |   6 +-
 .../src/general/scripts/os/notebook_git_creds.py   |   6 +-
 .../scripts/os/notebook_inactivity_check.py        |   6 +-
 .../general/scripts/os/notebook_install_libs.py    |   6 +-
 .../src/general/scripts/os/notebook_list_libs.py   |   6 +-
 .../os/notebook_reconfigure_dataengine_spark.py    |   8 +-
 .../scripts/os/notebook_reconfigure_spark.py       |   6 +-
 .../src/general/scripts/os/reconfigure_spark.py    |   6 +-
 .../os/rstudio_dataengine_create_configs.py        |   8 +-
 .../os/rstudio_install_dataengine_kernels.py       |  14 +-
 .../src/general/scripts/os/superset_start.py       |   6 +-
 .../os/tensor-rstudio_dataengine_create_configs.py |   8 +-
 .../tensor-rstudio_install_dataengine_kernels.py   |  14 +-
 .../scripts/os/tensor_dataengine_create_configs.py |   8 +-
 .../os/tensor_install_dataengine_kernels.py        |  14 +-
 .../scripts/os/update_inactivity_on_start.py       |   6 +-
 .../os/zeppelin_dataengine_create_configs.py       |   8 +-
 .../os/zeppelin_install_dataengine_kernels.py      |  14 +-
 .../aws/jenkins_jobs/create_data_engine/config.xml |   4 +-
 .../create_dataengine-service/config.xml           |  12 +-
 .../aws/jenkins_jobs/create_edge_node/config.xml   |   2 +-
 .../jenkins_jobs/create_notebook_image/config.xml  |   2 +-
 .../jenkins_jobs/create_notebook_server/config.xml |   2 +-
 .../config.xml                                     |   2 +-
 .../config.xml                                     |   2 +-
 .../dataengine_install_additional_libs/config.xml  |   2 +-
 .../dataengine_list_available_libs/config.xml      |   2 +-
 .../dataengine_reconfigure_spark/config.xml        |   2 +-
 .../aws/jenkins_jobs/gitlab_server/config.xml      |  12 +-
 .../jenkins_jobs/manage_git_credentials/config.xml |   2 +-
 .../notebook_install_additional_libs/config.xml    |   2 +-
 .../notebook_list_available_libs/config.xml        |   2 +-
 .../notebook_reconfigure_spark/config.xml          |   2 +-
 .../aws/jenkins_jobs/recreate_edge_node/config.xml |   2 +-
 .../aws/jenkins_jobs/reupload_ssh_key/config.xml   |   2 +-
 .../aws/jenkins_jobs/start_data_engine/config.xml  |   2 +-
 .../aws/jenkins_jobs/start_edge_node/config.xml    |   2 +-
 .../jenkins_jobs/start_notebook_server/config.xml  |   2 +-
 .../aws/jenkins_jobs/stop_data_engine/config.xml   |   2 +-
 .../aws/jenkins_jobs/stop_edge_node/config.xml     |   2 +-
 .../jenkins_jobs/stop_notebook_server/config.xml   |   2 +-
 .../jenkins_jobs/terminate_data_engine/config.xml  |   2 +-
 .../terminate_dataengine-service/config.xml        |   2 +-
 .../jenkins_jobs/terminate_edge_node/config.xml    |   2 +-
 .../terminate_notebook_image/config.xml            |   2 +-
 .../terminate_notebook_server/config.xml           |   2 +-
 .../jenkins_jobs/create_data_engine/config.xml     |   4 +-
 .../azure/jenkins_jobs/create_edge_node/config.xml |   2 +-
 .../jenkins_jobs/create_notebook_image/config.xml  |   2 +-
 .../jenkins_jobs/create_notebook_server/config.xml |   2 +-
 .../dataengine_install_additional_libs/config.xml  |   2 +-
 .../dataengine_list_available_libs/config.xml      |   2 +-
 .../dataengine_reconfigure_spark/config.xml        |   2 +-
 .../jenkins_jobs/manage_git_credentials/config.xml |   2 +-
 .../notebook_install_additional_libs/config.xml    |   2 +-
 .../notebook_list_available_libs/config.xml        |   2 +-
 .../notebook_reconfigure_spark/config.xml          |   2 +-
 .../jenkins_jobs/recreate_edge_node/config.xml     |   2 +-
 .../jenkins_jobs/start_data_engine/config.xml      |   2 +-
 .../azure/jenkins_jobs/start_edge_node/config.xml  |   2 +-
 .../jenkins_jobs/start_notebook_server/config.xml  |   2 +-
 .../azure/jenkins_jobs/stop_data_engine/config.xml |   2 +-
 .../azure/jenkins_jobs/stop_edge_node/config.xml   |   2 +-
 .../jenkins_jobs/stop_notebook_server/config.xml   |   2 +-
 .../jenkins_jobs/terminate_data_engine/config.xml  |   2 +-
 .../jenkins_jobs/terminate_edge_node/config.xml    |   2 +-
 .../terminate_notebook_image/config.xml            |   2 +-
 .../terminate_notebook_server/config.xml           |   2 +-
 .../gcp/jenkins_jobs/create_data_engine/config.xml |   4 +-
 .../create_dataengine-service/config.xml           |   4 +-
 .../gcp/jenkins_jobs/create_edge_node/config.xml   |   2 +-
 .../jenkins_jobs/create_notebook_server/config.xml |   2 +-
 .../config.xml                                     |   2 +-
 .../config.xml                                     |   2 +-
 .../dataengine_install_additional_libs/config.xml  |   2 +-
 .../dataengine_list_available_libs/config.xml      |   2 +-
 .../dataengine_reconfigure_spark/config.xml        |   2 +-
 .../gcp/jenkins_jobs/gitlab_server/config.xml      |  12 +-
 .../jenkins_jobs/manage_git_credentials/config.xml |   2 +-
 .../notebook_install_additional_libs/config.xml    |   2 +-
 .../notebook_list_available_libs/config.xml        |   2 +-
 .../notebook_reconfigure_spark/config.xml          |   2 +-
 .../gcp/jenkins_jobs/recreate_edge_node/config.xml |   2 +-
 .../gcp/jenkins_jobs/start_data_engine/config.xml  |   2 +-
 .../gcp/jenkins_jobs/start_edge_node/config.xml    |   2 +-
 .../jenkins_jobs/start_notebook_server/config.xml  |   2 +-
 .../gcp/jenkins_jobs/stop_data_engine/config.xml   |   2 +-
 .../gcp/jenkins_jobs/stop_edge_node/config.xml     |   2 +-
 .../jenkins_jobs/stop_notebook_server/config.xml   |   2 +-
 .../jenkins_jobs/terminate_data_engine/config.xml  |   2 +-
 .../terminate_dataengine-service/config.xml        |   2 +-
 .../jenkins_jobs/terminate_edge_node/config.xml    |   2 +-
 .../terminate_notebook_server/config.xml           |   2 +-
 .../templates/os/debian/spark-master.service       |   2 +-
 .../templates/os/debian/spark-slave.service        |   2 +-
 .../templates/os/redhat/spark-master.service       |   2 +-
 .../templates/os/redhat/spark-slave.service        |   2 +-
 .../src/general/templates/os/renew_certificates.sh |   6 +-
 infrastructure-provisioning/src/jupyter/fabfile.py |   6 +-
 .../src/jupyter/scripts/configure_jupyter_node.py  |   6 +-
 .../src/jupyterlab/fabfile.py                      |   6 +-
 .../scripts/configure_jupyterlab_node.py           |   6 +-
 infrastructure-provisioning/src/project/fabfile.py |   2 +-
 .../src/project/scripts/configure_http_proxy.py    |   2 +-
 .../src/project/scripts/configure_keycloak.py      |   6 +-
 .../src/project/scripts/configure_nftables.py      |   2 +-
 .../scripts/configure_nginx_reverse_proxy.py       |   4 +-
 .../src/project/scripts/reupload_ssh_key.py        |   6 +-
 .../src/project/templates/conf.d/proxy.conf        |   4 +-
 .../src/project/templates/squid.conf               |   4 +-
 infrastructure-provisioning/src/rstudio/fabfile.py |   6 +-
 .../src/rstudio/scripts/configure_rstudio_node.py  |   8 +-
 infrastructure-provisioning/src/ssn/fabfile.py     |   2 +-
 .../src/ssn/files/aws/mongo_roles.json             |  18 +-
 .../src/ssn/files/azure/mongo_roles.json           |  12 +-
 .../src/ssn/files/gcp/mongo_roles.json             |  20 +-
 .../src/ssn/scripts/backup.py                      |  32 +-
 .../src/ssn/scripts/configure_billing.py           |  22 +-
 .../src/ssn/scripts/configure_conf_file.py         |  10 +-
 .../src/ssn/scripts/configure_docker.py            |  56 +--
 .../src/ssn/scripts/configure_gitlab.py            |  12 +-
 .../src/ssn/scripts/configure_mongo.py             |  14 +-
 .../src/ssn/scripts/configure_ssn_node.py          |  92 ++---
 .../src/ssn/scripts/configure_ui.py                |  82 ++---
 .../src/ssn/scripts/docker_build.py                |   8 +-
 .../src/ssn/scripts/resource_status.py             |   4 +-
 .../src/ssn/scripts/restore.py                     |  20 +-
 .../src/ssn/scripts/upload_response_file.py        |   6 +-
 .../src/ssn/templates/gitlab.ini                   |  20 +-
 .../src/ssn/templates/nginx_proxy.conf             |   4 +-
 .../src/ssn/templates/ssn.yml                      |   4 +-
 .../src/ssn/templates/supervisor_svc.conf          |   8 +-
 .../src/superset/fabfile.py                        |   6 +-
 .../superset/scripts/configure_superset_node.py    |   8 +-
 .../src/tensor-rstudio/fabfile.py                  |   6 +-
 .../scripts/configure_tensor-rstudio_node.py       |   8 +-
 infrastructure-provisioning/src/tensor/fabfile.py  |   6 +-
 .../src/tensor/scripts/configure_tensor_node.py    |   8 +-
 .../src/zeppelin/fabfile.py                        |   6 +-
 .../zeppelin/scripts/configure_zeppelin_node.py    |   6 +-
 .../terraform/aws/endpoint/main/README.md          |   2 +-
 .../terraform/aws/endpoint/main/variables.tf       |   2 +-
 .../terraform/aws/project/main/variales.tf         |   2 +-
 .../main/datalab-billing-chart}/.helmignore        |   0
 .../main/datalab-billing-chart}/Chart.yaml         |   2 +-
 .../templates/NOTES.txt                            |   8 +-
 .../templates/_helpers.tpl                         |  12 +-
 .../templates/configmap-billing-conf.yaml          |   8 +-
 .../templates/deployment.yaml                      |  10 +-
 .../datalab-billing-chart}/templates/service.yaml  |   6 +-
 .../values.yaml                                    |   6 +-
 .../main/{dlab-billing.tf => datalab-billing.tf}   |  16 +-
 .../main/datalab-ui-chart}/.helmignore             |   0
 .../{dlab-ui-chart => datalab-ui-chart}/Chart.yaml |   2 +-
 .../main/datalab-ui-chart}/templates/NOTES.txt     |   8 +-
 .../templates/_helpers.tpl                         |  12 +-
 .../main/datalab-ui-chart}/templates/cert.yaml     |   8 +-
 .../templates/configmap-ui-conf.yaml               |  10 +-
 .../datalab-ui-chart}/templates/deployment.yaml    |  12 +-
 .../main/datalab-ui-chart}/templates/ingress.yaml  |   4 +-
 .../main/datalab-ui-chart}/templates/service.yaml  |   6 +-
 .../values.yaml                                    |   6 +-
 .../main/{dlab-ui.tf => datalab-ui.tf}             |  18 +-
 .../main/files/configure_keycloak.sh               |  10 +-
 .../main/files/keycloak_values.yaml                |   8 +-
 .../terraform/aws/ssn-helm-charts/main/keycloak.tf |   4 +-
 .../terraform/aws/ssn-helm-charts/main/main.tf     |   4 +-
 .../terraform/aws/ssn-helm-charts/main/mongo.tf    |   2 +-
 .../terraform/aws/ssn-helm-charts/main/mysql.tf    |   4 +-
 .../terraform/aws/ssn-helm-charts/main/nginx.tf    |   2 +-
 .../terraform/aws/ssn-helm-charts/main/secrets.tf  |  18 +-
 .../ssn-helm-charts/main/step-ca-chart/values.yaml |   2 +-
 .../terraform/aws/ssn-helm-charts/main/step-ca.tf  |   4 +-
 .../aws/ssn-helm-charts/main/step-issuer.tf        |   2 +-
 .../aws/ssn-helm-charts/main/variables.tf          |  14 +-
 .../terraform/aws/ssn-k8s/main/README.md           |   6 +-
 .../aws/ssn-k8s/main/files/masters-user-data.sh    |   8 +-
 .../aws/ssn-k8s/main/files/workers-user-data.sh    |   2 +-
 .../terraform/aws/ssn-k8s/main/variables.tf        |   6 +-
 .../terraform/azure/endpoint/main/variables.tf     |   2 +-
 .../terraform/bin/{dlab.py => datalab.py}          |  32 +-
 .../terraform/bin/deploy/billing_app_aws.yml       |   6 +-
 .../terraform/bin/deploy/billing_aws.yml           |  14 +-
 .../terraform/bin/deploy/billing_azure.yml         |  10 +-
 .../terraform/bin/deploy/billing_gcp.yml           |   8 +-
 .../terraform/bin/deploy/endpoint_fab.py           | 124 +++----
 .../bin/deploy/mongo_files/configure_mongo.py      |   6 +-
 .../bin/deploy/mongo_files/gcp/mongo_roles.json    |  20 +-
 .../terraform/bin/deploy/provisioning.yml          |  16 +-
 .../terraform/bin/deploy/renew_certificates.sh     |   6 +-
 .../terraform/bin/deploy/supervisor_svc.conf       |   4 +-
 .../terraform/bin/terraform-cli.py                 |  10 +-
 .../terraform/gcp/endpoint/main/variables.tf       |   4 +-
 .../terraform/gcp/endpoint/provisioning.py         | 232 ++++++-------
 .../terraform/gcp/endpoint/provisioning.yml        |  22 +-
 .../terraform/gcp/endpoint/supervisor_svc.conf     |   2 +-
 .../helm_charts/datalab-billing-chart}/.helmignore |   0
 .../helm_charts/datalab-billing-chart}/Chart.yaml  |   2 +-
 .../templates/NOTES.txt                            |   8 +-
 .../templates/_helpers.tpl                         |  12 +-
 .../templates/configmap-billing-conf.yaml          |   4 +-
 .../templates/deployment.yaml                      |  10 +-
 .../datalab-billing-chart}/templates/service.yaml  |   6 +-
 .../values.yaml                                    |   4 +-
 .../{dlab-billing.tf => datalab-billing.tf}        |  14 +-
 .../helm_charts/datalab-ui-chart}/.helmignore      |   0
 .../{dlab-ui-chart => datalab-ui-chart}/Chart.yaml |   2 +-
 .../datalab-ui-chart}/templates/NOTES.txt          |   8 +-
 .../templates/_helpers.tpl                         |  12 +-
 .../datalab-ui-chart}/templates/cert.yaml          |   8 +-
 .../templates/configmap-ui-conf.yaml               |  10 +-
 .../datalab-ui-chart}/templates/deployment.yaml    |  12 +-
 .../datalab-ui-chart}/templates/ingress.yaml       |   4 +-
 .../datalab-ui-chart}/templates/service.yaml       |   6 +-
 .../values.yaml                                    |   6 +-
 .../helm_charts/{dlab-ui.tf => datalab-ui.tf}      |  22 +-
 .../main/modules/helm_charts/external-dns.tf       |   4 +-
 .../external-dns/templates/externaldns.yaml        |   2 +-
 .../helm_charts/files/configure_keycloak.sh        |  10 +-
 .../helm_charts/files/get_configmap_values.sh      |   6 +-
 .../modules/helm_charts/files/keycloak_values.yaml |   8 +-
 .../ssn-gke/main/modules/helm_charts/keycloak.tf   |   4 +-
 .../gcp/ssn-gke/main/modules/helm_charts/main.tf   |   2 +-
 .../gcp/ssn-gke/main/modules/helm_charts/mongo.tf  |   4 +-
 .../gcp/ssn-gke/main/modules/helm_charts/mysql.tf  |   2 +-
 .../nginx-ingress/templates/nginx-ingress.yaml     |   4 +-
 .../gcp/ssn-gke/main/modules/helm_charts/nginx.tf  |   6 +-
 .../ssn-gke/main/modules/helm_charts/outputs.tf    |   2 +-
 .../ssn-gke/main/modules/helm_charts/secrets.tf    |  18 +-
 .../modules/helm_charts/step-ca-chart/values.yaml  |   2 +-
 .../ssn-gke/main/modules/helm_charts/step-ca.tf    |   6 +-
 .../main/modules/helm_charts/step-issuer.tf        |   2 +-
 .../terraform/gcp/ssn-gke/main/variables.tf        |  14 +-
 .../terraform/keycloak-theme/Dockerfile            |   2 +-
 integration-tests/README.MD                        |   4 +-
 services/readme.txt                                |  12 +-
 .../manage-environment-dilog.component.html        |   2 +-
 508 files changed, 4109 insertions(+), 4109 deletions(-)
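
The diffstat above records a purely mechanical rename touching 508 files. As a rough illustration of how such a rename can be double-checked, the sketch below walks a working tree and reports any surviving occurrences of the old spellings. It is not part of this commit; the skip list, the file handling and the pattern are assumptions rather than repository conventions.

```python
#!/usr/bin/env python
# Illustrative sketch (not part of this commit): after a bulk rename such as
# dlab -> datalab, DLAB -> DATA_LAB, DLab -> Data Lab, scan the tree for any
# occurrences of the old spellings that slipped through.
import os
import re
import sys

# Case-sensitive pattern keeps the three renamed spellings distinct from
# already-correct text such as "datalab"; \b stops "datalab" matching "dlab".
OLD_NAMES = re.compile(r'\b(dlab|DLAB|DLab)\b')
SKIP_DIRS = {'.git', 'node_modules', 'target'}  # assumed skip list

def leftover_occurrences(root):
    """Yield (path, line_number, line) for every old-name hit under root."""
    for dirpath, dirnames, filenames in os.walk(root):
        # Prune skipped directories in place so os.walk never descends into them.
        dirnames[:] = [d for d in dirnames if d not in SKIP_DIRS]
        for name in filenames:
            path = os.path.join(dirpath, name)
            try:
                with open(path, 'r') as handle:
                    for number, line in enumerate(handle, 1):
                        if OLD_NAMES.search(line):
                            yield path, number, line.rstrip()
            except (IOError, UnicodeDecodeError):
                continue  # binary or unreadable file: nothing to scan

if __name__ == '__main__':
    hits = list(leftover_occurrences(sys.argv[1] if len(sys.argv) > 1 else '.'))
    for path, number, line in hits:
        print('%s:%d: %s' % (path, number, line))
    sys.exit(1 if hits else 0)
```

Note that legitimate survivors of the old name, such as the incubator-dlab repository URL or the DLAB-2067 ticket ID, would still be flagged by this pattern and would have to be reviewed by hand.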

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 7b584f1..b353752 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -19,14 +19,14 @@ too old-fashioned, create a PR and move us to [Gradle](https://www.gradle.org/)!
 If you're interested in making it better, check out the `gh-pages` branch and dig in.
 If you are not familiar with the [Github Pages](https://dlab.apache.org/) - check it out,
 it's pretty simple yet powerful!
-* __giving feedback__ : Tell us how you use DLab, what was great and what was not 
+* __giving feedback__ : Tell us how you use Data Lab, what was great and what was not 
 so much. Also, what are you expecting from it and what would you like to see in 
 the future? Opening [an issue](https://github.com/apache/incubator-dlab/issues) will grab our
 attention. Seriously, this is a great way to contribute!
 
 #### Roles
 Much like projects in [ASF](https://www.apache.org/foundation/how-it-works.html#roles), 
-DLab recognizes a few roles. Unlike ASF's projects, our structure is way simpler.
+Data Lab recognizes a few roles. Unlike ASF's projects, our structure is way simpler.
 There are only two types:
   * __A Contributor__ is a user who contributes to a project in the form of code 
   	or documentation. Developers take extra steps to participate in a project,
@@ -45,7 +45,7 @@ There are only two types:
 
 #### RTC model
 
-DLab supports Review-Then-Commit model of development. The following rules are 
+Data Lab supports Review-Then-Commit model of development. The following rules are 
 used in the RTC process:
   * a developer should seek peer-review and/or feedback from other developers
   	through the PR mechanism (aka code review).
diff --git a/README.md b/README.md
index fad77b5..afebcf6 100644
--- a/README.md
+++ b/README.md
@@ -1,11 +1,11 @@
-DLAB Overview
+Data Lab Overview
 =============
 
 -------
 CONTENTS
 -------
 
-[What is DLAB?](#What_is_DLAB)
+[What is Data Lab?](#What_is_DLAB)
 
 &nbsp; &nbsp; [How to Contribute](CONTRIBUTING.md)
 
@@ -13,13 +13,13 @@ CONTENTS
 
 [Physical architecture](#Physical_architecture)
 
-[DLab Deployment](#DLab_Deployment)
+[Data Lab Deployment](#DLab_Deployment)
 
-&nbsp; &nbsp; &nbsp; &nbsp; [Structure of main DLab directory](#DLab_directory)
+&nbsp; &nbsp; &nbsp; &nbsp; [Structure of main Data Lab directory](#DLab_directory)
 
 &nbsp; &nbsp; &nbsp; &nbsp; [Structure of log directory](#log_directory)
 
-&nbsp; &nbsp; &nbsp; &nbsp; [Preparing environment for DLab deployment](#Env_for_DLab)
+&nbsp; &nbsp; &nbsp; &nbsp; [Preparing environment for Data Lab deployment](#Env_for_DLab)
 
 &nbsp; &nbsp; &nbsp; &nbsp; [Keycloak server](#Keycloak_server)
 
@@ -66,9 +66,9 @@ CONTENTS
 &nbsp; &nbsp; &nbsp; &nbsp; [Azure OAuth2 Authentication](#Azure_OAuth2_Authentication)
 
 ---------------
-# What is DLAB? <a name="What_is_DLAB"></a>
+# What is Data Lab? <a name="What_is_DLAB"></a>
 
-DLab is an essential toolset for analytics. It is a self-service Web Console, used to create and manage exploratory 
+Data Lab is an essential toolset for analytics. It is a self-service Web Console, used to create and manage exploratory 
 environments. It allows teams to spin up analytical environments with best of breed open-source tools just with a 
 single click of the mouse. Once established, environment can be managed by an analytical team itself, leveraging simple 
 and easy-to-use Web Interface.
@@ -81,7 +81,7 @@ The following diagram demonstrate high-level logical architecture.
 
 ![Logical architecture](doc/logical_architecture.png)
 
-The diagram shows main components of DLab, which is a self-service for the infrastructure deployment and interaction 
+The diagram shows main components of Data Lab, which is a self-service for the infrastructure deployment and interaction 
 with it. The purpose of each component is described below.
 
 ## Self-Service
@@ -119,19 +119,19 @@ Database serves as a storage with description of user infrastructure, user’s s
 -----------------------------
 # Physical architecture <a name="Physical_architecture"></a>
 
-The following diagrams demonstrate high-level physical architecture of DLab in AWS, GCP and Azure.
+The following diagrams demonstrate high-level physical architecture of Data Lab in AWS, GCP and Azure.
 
-### Dlab high level Architecture on AWS: 
+### Data Lab high level Architecture on AWS: 
 
-![Physical architecture](doc/dlab_aws.png)
+![Physical architecture](doc/datalab_aws.png)
 
-### Dlab high level Architecture on GCP:
+### Data Lab high level Architecture on GCP:
 
-![Physical architecture](doc/dlab_gcp.png)
+![Physical architecture](doc/datalab_gcp.png)
 
-### Dlab high level Architecture on Azure:
+### Data Lab high level Architecture on Azure:
 
-![Physical architecture](doc/dlab_azure.png)
+![Physical architecture](doc/datalab_azure.png)
 
 ## Main components
 
@@ -144,20 +144,20 @@ The following diagrams demonstrate high-level physical architecture of DLab in A
 
 ## Self-service node (SSN)
 
-Creation of self-service node – is the first step for deploying DLab. SSN is a main server with following pre-installed services:
+Creation of self-service node – is the first step for deploying Data Lab. SSN is a main server with following pre-installed services:
 
--   DLab Web UI – is Web user interface for managing/deploying all components of DLab. It is accessible by the 
+-   Data Lab Web UI – is Web user interface for managing/deploying all components of Data Lab. It is accessible by the 
     following URL: http[s]://SSN\_Public\_IP\_or\_Public\_DNS
--   MongoDB – is a database, which contains part of DLab’s configuration, user’s exploratory environments description 
+-   MongoDB – is a database, which contains part of Data Lab’s configuration, user’s exploratory environments description 
     as well as user’s preferences.
--   Docker – used for building DLab Docker containers, which will be used for provisioning other components.
+-   Docker – used for building Data Lab Docker containers, which will be used for provisioning other components.
 
 Elastic (Static) IP address is assigned to an SSN Node, so you are free to stop|start it and the SSN node's IP address 
 won’t change.
 
 ## Endpoint
 
-This is a node which serves as a provisioning endpoint for Dlab resources. Endpoint machine is deployed separately from Dlab
+This is a node which serves as a provisioning endpoint for Data Lab resources. Endpoint machine is deployed separately from Data Lab
 installation and can even be deployed on a different cloud.
 
 ## Edge node
@@ -169,7 +169,7 @@ Edge Node has a Nginx reverse-proxy pre-installed.
 
 The next step is setting up a Notebook node (or a Notebook server). It is a server with pre-installed applications and 
 libraries for data processing, data cleaning and transformations, numerical simulations, statistical modeling, machine 
-learning, etc. Following analytical tools are currently supported in DLab and can be installed on a Notebook node:
+learning, etc. Following analytical tools are currently supported in Data Lab and can be installed on a Notebook node:
 
 -   Jupyter
 -   Jupyterlab
@@ -193,14 +193,14 @@ That simplifies running big data frameworks, such as Apache Hadoop and Apache Sp
 of data. Adding cluster is not mandatory and is only needed in case additional computational resources are required for 
 job execution.
 ----------------------
-# DLab Deployment <a name="DLab_Deployment"></a>
+# Data Lab Deployment <a name="DLab_Deployment"></a>
 
-### Structure of main DLab directory <a name="DLab_directory"></a>
+### Structure of main Data Lab directory <a name="DLab_directory"></a>
 
-DLab’s SSN node main directory structure is as follows:
+Data Lab’s SSN node main directory structure is as follows:
 
     /opt  
-     └───dlab  
+     └───datalab  
          ├───conf  
          ├───sources  
          ├───template  
@@ -208,12 +208,12 @@ DLab’s SSN node main directory structure is as follows:
          │   └───result  
          └───webapp  
 
--   conf – contains configuration for DLab Web UI and back-end services;
+-   conf – contains configuration for Data Lab Web UI and back-end services;
 -   sources – contains all Docker/Python scripts, templates and files for provisioning;
 -   template – docker’s templates;
--   tmp – temporary directory of DLab;
+-   tmp – temporary directory of Data Lab;
 -   tmp/result – temporary directory for Docker’s response files;
--   webapp – contains all .jar files for DLab Web UI and back-end
+-   webapp – contains all .jar files for Data Lab Web UI and back-end
     services.
 
 ### Structure of log directory <a name="log_directory"></a>
@@ -222,7 +222,7 @@ SSN node structure of log directory is as follows:
 
     /var
      └───opt
-         └───dlab
+         └───datalab
              └───log
                  ├───dataengine
                  ├───dateengine-service
@@ -231,7 +231,7 @@ SSN node structure of log directory is as follows:
                  ├───project
                  └───ssn
 
-These directories contain the log files for each template and for DLab back-end services.
+These directories contain the log files for each template and for Data Lab back-end services.
 -   ssn – contains logs of back-end services;
 -   provisioning.log – Provisioning Service log file;
 -   security.log – Security Service log file;
@@ -241,8 +241,8 @@ These directories contain the log files for each template and for DLab back-end
 ## Keycloak server <a name="Keycloak_server"></a>
 
 **Keycloak** is used to manage user authentication instead of the application. To use an existing server, the following 
-  parameters must be specified either when running *Dlab* deployment script or in 
-*/opt/dlab/conf/self-service.yml* and */opt/dlab/conf/provisioning.yml* files on SSN node.
+  parameters must be specified either when running *Data Lab* deployment script or in 
+*/opt/datalab/conf/self-service.yml* and */opt/datalab/conf/provisioning.yml* files on SSN node.
 
 | Parameter                | Description/Value             |
 |--------------------------|-------------------------------|
@@ -263,7 +263,7 @@ Preparation steps for deployment:
     - Boot disk OS Image - Ubuntu 18.04
 - Put private key that is used to connect to instance where Keycloak will be deployed somewhere on the instance where 
   deployment script will be executed.
-- Install Git and clone DLab repository</details>
+- Install Git and clone Data Lab repository</details>
 ### Executing deployment script
 To build Keycloak node, following steps should be executed:
 - Connect to the instance via SSH and run the following commands:
@@ -273,7 +273,7 @@ apt-get update
 apt-get install -y python-pip
 pip install fabric==1.14.0
 ```
-- Go to *dlab* directory
+- Go to *datalab* directory
 - Run *infrastructure-provisioning/scripts/deploy_keycloak/deploy_keycloak.py* deployment script:
 
 ```
@@ -294,9 +294,9 @@ List of parameters for Keycloak node deployment:
 
 ## Self-Service Node <a name="Self_Service_Node"></a>
 
-### Preparing environment for DLab deployment <a name="Env_for_DLab"></a>
+### Preparing environment for Data Lab deployment <a name="Env_for_DLab"></a>
 
-Deployment of DLab starts with creating the Self-Service (SSN) node. DLab can be deployed in AWS, Azure and Google cloud.
+Deployment of Data Lab starts with creating the Self-Service (SSN) node. Data Lab can be deployed in AWS, Azure and Google cloud.
 
 For each cloud provider, prerequisites are different.
 
@@ -304,32 +304,32 @@ For each cloud provider, prerequisites are different.
 
 Prerequisites:
 
-DLab can be deployed using the following two methods:
- - IAM user: DLab deployment script is executed on local machine and uses IAM user permissions to create resources in AWS.
- - EC2 instance: DLab deployment script is executed on EC2 instance prepared in advance and with attached IAM role. 
+Data Lab can be deployed using the following two methods:
+ - IAM user: Data Lab deployment script is executed on local machine and uses IAM user permissions to create resources in AWS.
+ - EC2 instance: Data Lab deployment script is executed on EC2 instance prepared in advance and with attached IAM role. 
    Deployment script uses the attached IAM role to create resources in AWS.
 
 **'IAM user' method prerequisites:**  
  
  - IAM user with created AWS access key ID and secret access key. These keys are provided as arguments for the 
    deployment script and are used to create resources in AWS.
- - Amazon EC2 Key Pair. This is a system key and is used for configuring DLab instances.
- - The following IAM [policy](#AWS_SSN_policy) should be attached to the IAM user in order to deploy DLab.
+ - Amazon EC2 Key Pair. This is a system key and is used for configuring Data Lab instances.
+ - The following IAM [policy](#AWS_SSN_policy) should be attached to the IAM user in order to deploy Data Lab.
  
  **'EC2 instance' method prerequisites:**
  
- - Amazon EC2 Key Pair. This is a system key and is used for configuring DLab instances.
- - EC2 instance where DLab deployment script is executed. 
+ - Amazon EC2 Key Pair. This is a system key and is used for configuring Data Lab instances.
+ - EC2 instance where Data Lab deployment script is executed. 
  - IAM role with the following IAM [policy](#AWS_SSN_policy) should be attached to the EC2 instance. 
  
  **Optional prerequisites for both methods:**
   
-  - VPC ID. If VPC where DLab should be deployed is already in place, then "VPC ID" should be provided for deployment 
-    script. DLab instances are deployed in this VPC.
-  - Subnet ID. If Subnet where DLab should be deployed is already in place, then "Subnet ID" should be provided for 
-    deployment script. DLab SSN node and users' Edge nodes are deployed in this Subnet. 
+  - VPC ID. If VPC where Data Lab should be deployed is already in place, then "VPC ID" should be provided for deployment 
+    script. Data Lab instances are deployed in this VPC.
+  - Subnet ID. If Subnet where Data Lab should be deployed is already in place, then "Subnet ID" should be provided for 
+    deployment script. Data Lab SSN node and users' Edge nodes are deployed in this Subnet. 
  
- DLab IAM Policy
+ Data Lab IAM Policy
  <a name="AWS_SSN_policy"></a>
 ```
 {
@@ -433,11 +433,11 @@ Preparation steps for deployment:
 
 - Create an EC2 instance with the following settings:
     - The instance should have access to Internet in order to install required prerequisites
-    - The instance should have access to further DLab installation
+    - The instance should have access to further Data Lab installation
     - AMI - Ubuntu 16.04
     - IAM role with [policy](#AWS_SSN_policy) should be assigned to the instance
 - Put SSH key file created through Amazon Console on the instance with the same name
-- Install Git and clone DLab repository</details>
+- Install Git and clone Data Lab repository</details>
 
 <details><summary>In Azure cloud <i>(click to expand)</i></summary>
 
@@ -478,7 +478,7 @@ Preparation steps for deployment:
     - Boot disk OS Image - Ubuntu 16.04
 - Generate SSH key pair and rename private key with .pem extension
 - Put JSON auth file created through Google cloud console to users home directory
-- Install Git and clone DLab repository</details>
+- Install Git and clone Data Lab repository</details>
 
 ### Executing deployment script
 
@@ -501,16 +501,16 @@ apt-get install -y python-pip
 pip install fabric==1.14.0
 cd incubator-dlab
 ```
-- Go to *dlab* directory
-- Run *infrastructure-provisioning/scripts/deploy_dlab.py* deployment script:
+- Go to *datalab* directory
+- Run *infrastructure-provisioning/scripts/deploy_datalab.py* deployment script:
 
-This Python script will build the front-end and back-end parts of DLab, create the SSN docker image and run a Docker container 
+This Python script will build the front-end and back-end parts of Data Lab, create the SSN docker image and run a Docker container 
 for creating the SSN node.
 
 <details><summary>In Amazon cloud <i>(click to expand)</i></summary>
 
 ```
-/usr/bin/python infrastructure-provisioning/scripts/deploy_dlab.py --conf_service_base_name dlab-test --aws_access_key XXXXXXX --aws_secret_access_key XXXXXXXXXX --aws_region xx-xxxxx-x --conf_os_family debian --conf_cloud_provider aws --aws_vpc_id vpc-xxxxx --aws_subnet_id subnet-xxxxx --aws_security_groups_ids sg-xxxxx,sg-xxxx --key_path /path/to/key/ --conf_key_name key_name --conf_tag_resource_id dlab --aws_account_id xxxxxxxx --aws_billing_bucket billing_bucket --aws_report_path /bi [...]
+/usr/bin/python infrastructure-provisioning/scripts/deploy_datalab.py --conf_service_base_name datalab-test --aws_access_key XXXXXXX --aws_secret_access_key XXXXXXXXXX --aws_region xx-xxxxx-x --conf_os_family debian --conf_cloud_provider aws --aws_vpc_id vpc-xxxxx --aws_subnet_id subnet-xxxxx --aws_security_groups_ids sg-xxxxx,sg-xxxx --key_path /path/to/key/ --conf_key_name key_name --conf_tag_resource_id datalab --aws_account_id xxxxxxxx --aws_billing_bucket billing_bucket --aws_report [...]
 ```
 
 List of parameters for SSN node deployment:
@@ -521,9 +521,9 @@ List of parameters for SSN node deployment:
 | aws\_access\_key          | AWS user access key                                                                     |
 | aws\_secret\_access\_key  | AWS user secret access key                                                              |
 | aws\_region               | AWS region                                                                              |
-| conf\_os\_family          | Name of the Linux distribution family, which is supported by DLab (Debian/RedHat)       |
-| conf\_cloud\_provider     | Name of the cloud provider, which is supported by DLab (AWS)
-| conf\_duo\_vpc\_enable    | "true" - for installing DLab into two Virtual Private Clouds (VPCs) or "false" - for installing DLab into one VPC. This parameter isn't required when deploying DLab in one VPC|
+| conf\_os\_family          | Name of the Linux distribution family, which is supported by Data Lab (Debian/RedHat)       |
+| conf\_cloud\_provider     | Name of the cloud provider, which is supported by Data Lab (AWS)
+| conf\_duo\_vpc\_enable    | "true" - for installing Data Lab into two Virtual Private Clouds (VPCs) or "false" - for installing Data Lab into one VPC. This parameter isn't required when deploying Data Lab in one VPC|
 | aws\_vpc\_id              | ID of the VPC (optional)                                                    |
 | aws\_subnet\_id           | ID of the public subnet (optional)                                                                  |
 | aws\_security\_groups\_ids| One or more ID\`s of AWS Security Groups, which will be assigned to SSN node (optional)             |
@@ -534,7 +534,7 @@ List of parameters for SSN node deployment:
 | aws\_billing\_bucket      | The name of S3 bucket where billing reports will be placed                              |
 | aws\_report\_path         | The path to billing reports directory in S3 bucket. This parameter isn't required when billing reports are placed in the root of S3 bucket. |
 | action                    | In case of SSN node creation, this parameter should be set to “create”|
-| workspace\_path           | Path to DLab sources root
+| workspace\_path           | Path to Data Lab sources root
 | conf\_image\_enabled      | Enable or Disable creating image at first time |
 
 **Note:** If the following parameters are not specified, they will be created automatically:
@@ -555,12 +555,12 @@ List of parameters for SSN node deployment:
 -   Security Group for SSN node (if it was specified, script will attach the provided one)
 -   VPC, Subnet (if they have not been specified) for SSN and EDGE nodes
    S3 bucket – its name will be \<service\_base\_name\>-ssn-bucket. This bucket will contain necessary dependencies and configuration files for Notebook nodes (such as .jar files, YARN configuration, etc.)
--   S3 bucket for collaboration between Dlab users. Its name will be \<service\_base\_name\>-\<endpoint\_name\>-shared-bucket</details>
+-   S3 bucket for collaboration between Data Lab users. Its name will be \<service\_base\_name\>-\<endpoint\_name\>-shared-bucket</details>
 
 <details><summary>In Azure cloud <i>(click to expand)</i></summary>
 
 ```
-/usr/bin/python infrastructure-provisioning/scripts/deploy_dlab.py --conf_service_base_name dlab_test --azure_region westus2 --conf_os_family debian --conf_cloud_provider azure --azure_vpc_name vpc-test --azure_subnet_name subnet-test --azure_security_group_name sg-test1,sg-test2 --key_path /root/ --conf_key_name Test --azure_auth_path /dir/file.json  --action create
+/usr/bin/python infrastructure-provisioning/scripts/deploy_datalab.py --conf_service_base_name datalab_test --azure_region westus2 --conf_os_family debian --conf_cloud_provider azure --azure_vpc_name vpc-test --azure_subnet_name subnet-test --azure_security_group_name sg-test1,sg-test2 --key_path /root/ --conf_key_name Test --azure_auth_path /dir/file.json  --action create
 ```
 
 List of parameters for SSN node deployment:
@@ -570,8 +570,8 @@ List of parameters for SSN node deployment:
 | conf\_service\_base\_name         | Any infrastructure value (should be unique if multiple SSN’s have been deployed before) |
 | azure\_resource\_group\_name      | Resource group name (can be the same as service base name                             |
 | azure\_region                     | Azure region                                                                            |
-| conf\_os\_family                  | Name of the Linux distribution family, which is supported by DLab (Debian/RedHat)       |
-| conf\_cloud\_provider             | Name of the cloud provider, which is supported by DLab (Azure)                          |
+| conf\_os\_family                  | Name of the Linux distribution family, which is supported by Data Lab (Debian/RedHat)       |
+| conf\_cloud\_provider             | Name of the cloud provider, which is supported by Data Lab (Azure)                          |
 | azure\_vpc\_name                  | Name of the Virtual Network (VN) (optional)                                                         |
 | azure\_subnet\_name               | Name of the Azure subnet (optional)                                                                 |
 | azure\_security\_groups\_name     | One or more Name\`s of Azure Security Groups, which will be assigned to SSN node (optional)         |
@@ -585,8 +585,8 @@ List of parameters for SSN node deployment:
 | azure\_region\_info               | Region info that is used for billing information(e.g. US)                               |
 | azure\_datalake\_enable           | Support of Azure Data Lake (true/false)                                                 |
 | azure\_oauth2\_enabled            | Defines if Azure OAuth2 authentication mechanisms is enabled(true/false)                |
-| azure\_validate\_permission\_scope| Defines if DLab verifies user's permission to the configured resource(scope) during login with OAuth2 (true/false). If Data Lake is enabled default scope is Data Lake Store Account, else Resource Group, where DLab is deployed, is default scope. If user does not have any role in scope he/she is forbidden to log in
-| azure\_application\_id            | Azure application ID that is used to log in users in DLab                                                     |
+| azure\_validate\_permission\_scope| Defines if Data Lab verifies user's permission to the configured resource(scope) during login with OAuth2 (true/false). If Data Lake is enabled default scope is Data Lake Store Account, else Resource Group, where Data Lab is deployed, is default scope. If user does not have any role in scope he/she is forbidden to log in
+| azure\_application\_id            | Azure application ID that is used to log in users in Data Lab                                                     |
 | azure\_ad\_group\_id              | ID of group in Active directory whose members have full access to shared folder in Azure Data Lake Store                                                                          |
 | action                            | In case of SSN node creation, this parameter should be set to “create”                  |
 | conf\_image\_enabled      | Enable or Disable creating image at first time |
@@ -605,7 +605,7 @@ click Overview and you should see it under Offer ID property:
 ![Azure offer number](doc/azure_offer_number.png)
 
 Please see [RateCard API](https://msdn.microsoft.com/en-us/library/mt219004.aspx) to get more details about 
-azure\_offer\_number, azure\_currency, azure\_locale, azure\_region_info. These DLab deploy properties correspond to 
+azure\_offer\_number, azure\_currency, azure\_locale, azure\_region_info. These Data Lab deploy properties correspond to 
 RateCard API request parameters.
 
 To have working billing functionality please review Billing configuration note and use proper parameters for SSN node 
@@ -628,26 +628,26 @@ deployment.
 - Get *Application ID* from application properties; it will be used as azure_application_id for the deploy_datalab.py script
 2. Usage of the Data Lake resource presumes a shared folder where all users can write or read any data. To manage access to 
    this folder please create or use an existing group in Active Directory. All users from this group will have RW access to 
+   the shared folder. Put the ID (in Active Directory) of the group as the *azure_ad_group_id* parameter to the deploy_datalab.py script
-3. After execution of deploy_dlab.py script go to the application created in step 1 and change *Redirect URIs* value to 
+   the shared folder. Put ID(in Active Directory) of the group as *azure_ad_group_id* parameter to deploy_datalab.py script
+3. After execution of deploy_datalab.py script go to the application created in step 1 and change *Redirect URIs* value to 
    the https://SSN_HOSTNAME/ where SSN_HOSTNAME is the SSN node hostname
 
 After SSN node deployment, the following Azure resources will be created:
 
--   Resource group where all DLAb resources will be provisioned
+-   Resource group where all Data Lab resources will be provisioned
 -   SSN Virtual machine
 -   Static public IP address for SSN virtual machine
 -   Network interface for SSN node
 -   Security Group for SSN node (if it was specified, script will attach the provided one)
 -   Virtual network and Subnet (if they have not been specified) for SSN and EDGE nodes
 -   Storage account and blob container for necessary further dependencies and configuration files for Notebook nodes (such as .jar files, YARN configuration, etc.)
--   Storage account and blob container for collaboration between Dlab users
+-   Storage account and blob container for collaboration between Data Lab users
 -   If support of Data Lake is enabled: Data Lake and shared directory will be created</details>
 
 <details><summary>In Google cloud (GCP) <i>(click to expand)</i></summary>
 
 ```
-/usr/bin/python infrastructure-provisioning/scripts/deploy_dlab.py --conf_service_base_name dlab-test --gcp_region xx-xxxxx --gcp_zone xxx-xxxxx-x --conf_os_family debian --conf_cloud_provider gcp --key_path /path/to/key/ --conf_key_name key_name --gcp_ssn_instance_size n1-standard-1 --gcp_project_id project_id --gcp_service_account_path /path/to/auth/file.json --action create
+/usr/bin/python infrastructure-provisioning/scripts/deploy_datalab.py --conf_service_base_name datalab-test --gcp_region xx-xxxxx --gcp_zone xxx-xxxxx-x --conf_os_family debian --conf_cloud_provider gcp --key_path /path/to/key/ --conf_key_name key_name --gcp_ssn_instance_size n1-standard-1 --gcp_project_id project_id --gcp_service_account_path /path/to/auth/file.json --action create
 ```
 
 List of parameters for SSN node deployment:
@@ -657,8 +657,8 @@ List of parameters for SSN node deployment:
 | conf\_service\_base\_name    | Any infrastructure value (should be unique if multiple SSN’s have been deployed before)|
 | gcp\_region                  | GCP region                                                                            |
 | gcp\_zone                    | GCP zone                                                                              |
-| conf\_os\_family             | Name of the Linux distributive family, which is supported by DLab (Debian/RedHat)     |
-| conf\_cloud\_provider        | Name of the cloud provider, which is supported by DLab (GCP)                          |
+| conf\_os\_family             | Name of the Linux distributive family, which is supported by Data Lab (Debian/RedHat)     |
+| conf\_cloud\_provider        | Name of the cloud provider, which is supported by Data Lab (GCP)                          |
 | gcp\_vpc\_name               | Name of the Virtual Network (VN) (optional)                                           |
 | gcp\_subnet\_name            | Name of the GCP subnet (optional)                                                     |
 | gcp\_firewall\_name          | One or more names of GCP Security Groups, which will be assigned to SSN node (optional)  |
@@ -681,19 +681,19 @@ After SSN node deployment following GCP resources will be created:
 -   IAM role and Service account for SSN
 -   Security Groups for SSN node (if it was specified, script will attach the provided one)
 -   VPC, Subnet (if they have not been specified) for SSN and EDGE nodes
--   Bucket for for collaboration between Dlab users. Its name will be 
+-   Bucket for collaboration between Data Lab users. Its name will be 
     \<service\_base\_name\>-\<endpoint\_name\>-shared-bucket</details>
 
 ### Terminating Self-Service Node
 
 Terminating SSN node will also remove all nodes and components related to it. Basically, terminating Self-service node 
-will terminate all DLab’s infrastructure.
-Example of command for terminating DLab environment:
+will terminate all Data Lab’s infrastructure.
+Example of command for terminating Data Lab environment:
 
 <details><summary>In Amazon <i>(click to expand)</i></summary>
 
 ```
-/usr/bin/python infrastructure-provisioning/scripts/deploy_dlab.py --conf_service_base_name dlab-test --aws_access_key XXXXXXX --aws_secret_access_key XXXXXXXX --aws_region xx-xxxxx-x --key_path /path/to/key/ --conf_key_name key_name --conf_os_family debian --conf_cloud_provider aws --action terminate
+/usr/bin/python infrastructure-provisioning/scripts/deploy_datalab.py --conf_service_base_name datalab-test --aws_access_key XXXXXXX --aws_secret_access_key XXXXXXXX --aws_region xx-xxxxx-x --key_path /path/to/key/ --conf_key_name key_name --conf_os_family debian --conf_cloud_provider aws --action terminate
 ```
 List of parameters for SSN node termination:
 
@@ -705,15 +705,15 @@ List of parameters for SSN node termination:
 | aws\_region                | AWS region                                                                         |
 | key\_path                  | Path to admin key (without key name)                                               |
 | conf\_key\_name            | Name of the uploaded SSH key file (without “.pem” extension)                       |
-| conf\_os\_family           | Name of the Linux distributive family, which is supported by DLab (Debian/RedHat)  |
-| conf\_cloud\_provider      | Name of the cloud provider, which is supported by DLab (AWS)                       |
+| conf\_os\_family           | Name of the Linux distributive family, which is supported by Data Lab (Debian/RedHat)  |
+| conf\_cloud\_provider      | Name of the cloud provider, which is supported by Data Lab (AWS)                       |
 | action                     | terminate                                                                          |
 </details>
 
 <details><summary>In Azure <i>(click to expand)</i></summary>
 
 ```
-/usr/bin/python infrastructure-provisioning/scripts/deploy_dlab.py --conf_service_base_name dlab-test --azure_vpc_name vpc-test --azure_resource_group_name resource-group-test --azure_region westus2 --key_path /root/ --conf_key_name Test --conf_os_family debian --conf_cloud_provider azure --azure_auth_path /dir/file.json --action terminate
+/usr/bin/python infrastructure-provisioning/scripts/deploy_datalab.py --conf_service_base_name datalab-test --azure_vpc_name vpc-test --azure_resource_group_name resource-group-test --azure_region westus2 --key_path /root/ --conf_key_name Test --conf_os_family debian --conf_cloud_provider azure --azure_auth_path /dir/file.json --action terminate
 ```
 List of parameters for SSN node termination:
 
@@ -721,8 +721,8 @@ List of parameters for SSN node termination:
 |----------------------------|------------------------------------------------------------------------------------|
 | conf\_service\_base\_name  | Unique infrastructure value                                                        |
 | azure\_region              | Azure region                                                                       |
-| conf\_os\_family           | Name of the Linux distributive family, which is supported by DLab (Debian/RedHat)  |
-| conf\_cloud\_provider      | Name of the cloud provider, which is supported by DLab (Azure)                     |
+| conf\_os\_family           | Name of the Linux distributive family, which is supported by Data Lab (Debian/RedHat)  |
+| conf\_cloud\_provider      | Name of the cloud provider, which is supported by Data Lab (Azure)                     |
 | azure\_vpc\_name           | Name of the Virtual Network (VN)                                                   |
 | key\_path                  | Path to admin key (without key name)                                               |
 | conf\_key\_name            | Name of the uploaded SSH key file (without “.pem” extension)                       |
@@ -733,7 +733,7 @@ List of parameters for SSN node termination:
 <details><summary>In Google cloud <i>(click to expand)</i></summary>
 
 ```
-/usr/bin/python infrastructure-provisioning/scripts/deploy_dlab.py --gcp_project_id project_id --conf_service_base_name dlab-test --gcp_region xx-xxxxx --gcp_zone xx-xxxxx-x --key_path /path/to/key/ --conf_key_name key_name --conf_os_family debian --conf_cloud_provider gcp --gcp_service_account_path /path/to/auth/file.json --action terminate
+/usr/bin/python infrastructure-provisioning/scripts/deploy_datalab.py --gcp_project_id project_id --conf_service_base_name datalab-test --gcp_region xx-xxxxx --gcp_zone xx-xxxxx-x --key_path /path/to/key/ --conf_key_name key_name --conf_os_family debian --conf_cloud_provider gcp --gcp_service_account_path /path/to/auth/file.json --action terminate
 ```
 List of parameters for SSN node termination:
 
@@ -742,8 +742,8 @@ List of parameters for SSN node termination:
 | conf\_service\_base\_name    | Any infrastructure value (should be unique if multiple SSN’s have been deployed before)|
 | gcp\_region                  | GCP region                                                                            |
 | gcp\_zone                    | GCP zone                                                                              |
-| conf\_os\_family             | Name of the Linux distributive family, which is supported by DLab (Debian/RedHat)     |
-| conf\_cloud\_provider        | Name of the cloud provider, which is supported by DLab (GCP)                          |
+| conf\_os\_family             | Name of the Linux distributive family, which is supported by Data Lab (Debian/RedHat)     |
+| conf\_cloud\_provider        | Name of the cloud provider, which is supported by Data Lab (GCP)                          |
 | gcp\_vpc\_name               | Name of the Virtual Network (VN) (optional)                                           |
 | gcp\_subnet\_name            | Name of the GCP subnet (optional)                                                     |
 | key\_path                    | Path to admin key (without key name)                                                  |
@@ -760,13 +760,13 @@ pre-defined VPC and Subnet.
 
 Gateway node (or an Edge node) is an instance (virtual machine) provisioned in a public subnet. It serves as an entry 
 point for accessing the user’s personal analytical environment. It is created by an end-user, whose public key will be 
-uploaded there. Only via Edge node, DLab user can access such application resources as notebook servers and dataengine 
+uploaded there. Only via the Edge node can a Data Lab user access such application resources as notebook servers and dataengine 
 clusters. Also, the Edge node is used to set up a SOCKS proxy to access notebook servers via Web UI and SSH. An Elastic (Static) 
 IP address is assigned to an Edge node. 
 
 ### Create
 
-In order to create Edge node using DLab Web UI – login and, click on the button “Upload” (Depending on authorization 
+In order to create an Edge node using the Data Lab Web UI – log in and click on the button “Upload” (depending on the authorization 
 provider that was chosen on deployment stage, user may be taken from [LDAP](#LDAP_Authentication) or from 
 [Azure AD (Oauth2)](#Azure_OAuth2_Authentication)). Choose user’s SSH public key and after that click on the button 
 “Create”. Edge node will be deployed and corresponding instance (virtual machine) will be started.
@@ -790,7 +790,7 @@ List of parameters for Edge node creation:
 | Parameter                  | Description/Value                                                                     |
 |--------------------------------|-----------------------------------------------------------------------------------|
 | conf\_resource                 | edge                                                                              |
-| conf\_os\_family               | Name of the Linux distributive family, which is supported by DLAB (debian/redhat) |
+| conf\_os\_family               | Name of the Linux distributive family, which is supported by Data Lab (debian/redhat) |
 | conf\_service\_base\_name      | Unique infrastructure value, specified during SSN deployment                      |
 | conf\_key\_name                | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name               | Name of the user                                                                  |
@@ -822,11 +822,11 @@ List of parameters for Edge node creation:
 | Parameter                  | Description/Value                                                                     |
 |--------------------------------|-----------------------------------------------------------------------------------|
 | conf\_resource                 | edge                                                                              |
-| conf\_os\_family               | Name of the Linux distributive family, which is supported by DLAB (debian/redhat) |
+| conf\_os\_family               | Name of the Linux distributive family, which is supported by Data Lab (debian/redhat) |
 | conf\_service\_base\_name      | Unique infrastructure value, specified during SSN deployment                      |
 | conf\_key\_name                | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name               | Name of the user                                                                  |
-| azure\_resource\_group\_name   | Name of the resource group where all DLAb resources are being provisioned         |
+| azure\_resource\_group\_name   | Name of the resource group where all Data Lab resources are being provisioned         |
 | azure\_region                  | Azure region where infrastructure was deployed                                    |
 | azure\_vpc\_name               | Name of Azure Virtual network where all infrastructure is being deployed          |
 | azure\_subnet\_name            | Name of the Azure public subnet where Edge will be deployed                       |
@@ -851,7 +851,7 @@ List of parameters for Edge node creation:
 | Parameter                  | Description/Value                                                                     |
 |--------------------------------|-----------------------------------------------------------------------------------|
 | conf\_resource                 | edge                                                                              |
-| conf\_os\_family               | Name of the Linux distributive family, which is supported by DLAB (debian/redhat) |
+| conf\_os\_family               | Name of the Linux distributive family, which is supported by Data Lab (debian/redhat) |
 | conf\_service\_base\_name      | Unique infrastructure value, specified during SSN deployment                      |
 | conf\_key\_name                | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name               | Name of the user                                                                  |
@@ -890,7 +890,7 @@ List of parameters for Edge node starting:
 | conf\_resource               | edge                                                                      |
 | conf\_service\_base\_name    | Unique infrastructure value, specified during SSN deployment              |
 | edge\_user\_name             | Name of the user                                                          |
-| azure\_resource\_group\_name | Name of the resource group where all DLAb resources are being provisioned |
+| azure\_resource\_group\_name | Name of the resource group where all Data Lab resources are being provisioned |
 | azure\_region                | Azure region where infrastructure was deployed                            |
 | action                       | start                                                                     |
 
@@ -901,7 +901,7 @@ List of parameters for Edge node stopping:
 | conf\_resource               | edge                                                                      |
 | conf\_service\_base\_name    | Unique infrastructure value, specified during SSN deployment              |
 | edge\_user\_name             | Name of the user                                                          |
-| azure\_resource\_group\_name | Name of the resource group where all DLAb resources are being provisioned |
+| azure\_resource\_group\_name | Name of the resource group where all Data Lab resources are being provisioned |
 | action                       | stop                                                                      |
 </details>
 
@@ -941,7 +941,7 @@ List of parameters for Notebook node creation:
 | Parameter                     | Description/Value                                                                 |
 |-------------------------------|-----------------------------------------------------------------------------------|
 | conf\_resource                | notebook                                                                          |
-| conf\_os\_family              | Name of the Linux distributive family, which is supported by DLAB (debian/redhat) |
+| conf\_os\_family              | Name of the Linux distributive family, which is supported by Data Lab (debian/redhat) |
 | conf\_service\_base\_name     | Unique infrastructure value, specified during SSN deployment                      |
 | conf\_key\_name               | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name              | Value that previously was used when Edge being provisioned                        |
@@ -961,14 +961,14 @@ List of parameters for Notebook node creation:
 | Parameter                       | Description/Value                                                                 |
 |---------------------------------|-----------------------------------------------------------------------------------|
 | conf\_resource                  | notebook                                                                          |
-| conf\_os\_family                | Name of the Linux distributive family, which is supported by DLAB (debian/redhat) |
+| conf\_os\_family                | Name of the Linux distributive family, which is supported by Data Lab (debian/redhat) |
 | conf\_service\_base\_name       | Unique infrastructure value, specified during SSN deployment                      |
 | conf\_key\_name                 | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name                | Value that previously was used when Edge being provisioned                        |
 | azure\_notebook\_instance\_size | Value of the Notebook virtual machine shape                                       |
 | azure\_region                   | Azure region where infrastructure was deployed                                    |
 | azure\_vpc\_name                | Name of Azure Virtual network where all infrastructure is being deployed          |
-| azure\_resource\_group\_name    | Name of the resource group where all DLAb resources are being provisioned         |
+| azure\_resource\_group\_name    | Name of the resource group where all Data Lab resources are being provisioned         |
 | application                     | Type of the notebook template (jupyter/rstudio/zeppelin/tensor/deeplearning)      |
 | git\_creds                      | User git credentials in JSON format                                               |
 | action                          | Create                                                                            |
@@ -979,7 +979,7 @@ List of parameters for Notebook node creation:
 | Parameter                     | Description/Value                                                                 |
 |-------------------------------|-----------------------------------------------------------------------------------|
 | conf\_resource                | notebook                                                                          |
-| conf\_os\_family              | Name of the Linux distributive family, which is supported by DLAB (debian/redhat) |
+| conf\_os\_family              | Name of the Linux distributive family, which is supported by Data Lab (debian/redhat) |
 | conf\_service\_base\_name     | Unique infrastructure value, specified during SSN deployment                      |
 | conf\_key\_name               | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name              | Value that previously was used when Edge being provisioned                        |
@@ -1022,7 +1022,7 @@ List of parameters for Notebook node stopping:
 | conf\_key\_name                 | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name                | Value that previously was used when Edge being provisioned                        |
 | notebook\_instance\_name        | Name of the Notebook instance to stop                                              |
-| azure\_resource\_group\_name    | Name of the resource group where all DLAb resources are being provisioned         |
+| azure\_resource\_group\_name    | Name of the resource group where all Data Lab resources are being provisioned         |
 | action                          | Stop                                                                              |
 </details>
 
@@ -1073,7 +1073,7 @@ List of parameters for Notebook node start:
 | conf\_key\_name                 | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name                | Value that previously was used when Edge being provisioned                        |
 | notebook\_instance\_name        | Name of the Notebook instance to start                                             |
-| azure\_resource\_group\_name    | Name of the resource group where all DLAb resources are being provisioned         |
+| azure\_resource\_group\_name    | Name of the resource group where all Data Lab resources are being provisioned         |
 | azure\_region                   | Azure region where infrastructure was deployed                                    |
 | git\_creds                      | User git credentials in JSON format                                               |
 | action                          | start                                                                             |
@@ -1125,7 +1125,7 @@ List of parameters for Notebook node termination:
 | conf\_service\_base\_name       | Unique infrastructure value, specified during SSN deployment                      |
 | edge\_user\_name                | Value that previously was used when Edge being provisioned                        |
 | notebook\_instance\_name        | Name of the Notebook instance to terminate                                        |
-| azure\_resource\_group\_name    | Name of the resource group where all DLAb resources are being provisioned         |
+| azure\_resource\_group\_name    | Name of the resource group where all Data Lab resources are being provisioned         |
 | action                          | terminate                                                                         |
 </details>
 
@@ -1222,7 +1222,7 @@ List of parameters for Notebook node to **get list** of available libraries:
 | conf\_key\_name               | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name              | Value that previously was used when Edge being provisioned                        |
 | notebook\_instance\_name      | Name of the Notebook instance                                                      |
-| azure\_resource\_group\_name  | Name of the resource group where all DLAb resources are being provisioned         |
+| azure\_resource\_group\_name  | Name of the resource group where all Data Lab resources are being provisioned         |
 | application                   | Type of the notebook template (jupyter/rstudio/zeppelin/tensor/deeplearning)      |
 | action                        | lib_list                                                                          |
 
@@ -1235,7 +1235,7 @@ List of parameters for Notebook node to **install** additional libraries:
 | conf\_key\_name               | Name of the uploaded SSH key file (without ".pem")                                   |
 | edge\_user\_name              | Value that previously was used when Edge being provisioned                           |
 | notebook\_instance\_name      | Name of the Notebook instance                                                         |
-| azure\_resource\_group\_name  | Name of the resource group where all DLAb resources are being provisioned            |
+| azure\_resource\_group\_name  | Name of the resource group where all Data Lab resources are being provisioned            |
 | application                   | Type of the notebook template (jupyter/rstudio/zeppelin/tensor/deeplearning)         |
 | libs                          | List of additional libraries in JSON format with type (os_pkg/pip2/pip3/r_pkg/others)|
 | action                        | lib_install                                                                          |
@@ -1321,7 +1321,7 @@ List of parameters for Notebook node to **manage git credentials**:
 | conf\_key\_name               | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name              | Value that previously was used when Edge being provisioned                        |
 | notebook\_instance\_name      | Name of the Notebook instance                                                      |
-| azure\_resource\_group\_name  | Name of the resource group where all DLAb resources are being provisioned         |
+| azure\_resource\_group\_name  | Name of the resource group where all Data Lab resources are being provisioned         |
 | git\_creds                    | User git credentials in JSON format                                               |
 | action                        | git\_creds                                                                        |
 </details>
@@ -1550,7 +1550,7 @@ List of parameters for dataengine cluster creation:
 | conf\_resource                 | dataengine                                                                        |
 | conf\_service\_base\_name      | Unique infrastructure value, specified during SSN deployment                      |
 | conf\_key\_name                | Name of the uploaded SSH key file (without ".pem")                                |
-| conf\_os\_family               | Name of the Linux distributive family, which is supported by DLab (Debian/RedHat) |
+| conf\_os\_family               | Name of the Linux distributive family, which is supported by Data Lab (Debian/RedHat) |
 | notebook\_instance\_name       | Name of the Notebook dataengine will be linked to                                 |
 | dataengine\_instance\_count    | Number of nodes in cluster                                                        |
 | edge\_user\_name               | Value that previously was used when Edge being provisioned                        |
@@ -1567,7 +1567,7 @@ List of parameters for dataengine cluster creation:
 | conf\_resource                 | dataengine                                                                        |
 | conf\_service\_base\_name      | Unique infrastructure value, specified during SSN deployment                      |
 | conf\_key\_name                | Name of the uploaded SSH key file (without ".pem")                                |
-| conf\_os\_family               | Name of the Linux distributive family, which is supported by DLab (Debian/RedHat) |
+| conf\_os\_family               | Name of the Linux distributive family, which is supported by Data Lab (Debian/RedHat) |
 | notebook\_instance\_name       | Name of the Notebook dataengine will be linked to                                 |
 | dataengine\_instance\_count    | Number of nodes in cluster                                                        |
 | edge\_user\_name               | Value that previously was used when Edge being provisioned                        |
@@ -1575,7 +1575,7 @@ List of parameters for dataengine cluster creation:
 | azure\_region                  | Azure region where all infrastructure was deployed                                |
 | azure\_dataengine\_master\_size| Size of master node                                                               |
 | azure\_dataengine\_slave\_size | Size of slave node                                                                |
-| azure\_resource\_group\_name   | Name of the resource group where all DLAb resources are being provisioned         |
+| azure\_resource\_group\_name   | Name of the resource group where all Data Lab resources are being provisioned         |
 | azure\_subnet\_name            | Name of the Azure public subnet where Edge was deployed                           |
 | action                         | create                                                                            |
 </details>
@@ -1587,7 +1587,7 @@ List of parameters for dataengine cluster creation:
 | conf\_resource               | dataengine                                                                        |
 | conf\_service\_base\_name    | Unique infrastructure value, specified during SSN deployment                      |
 | conf\_key\_name              | Name of the uploaded SSH key file (without ".pem")                                |
-| conf\_os\_family             | Name of the Linux distributive family, which is supported by DLab (Debian/RedHat) |
+| conf\_os\_family             | Name of the Linux distributive family, which is supported by Data Lab (Debian/RedHat) |
 | notebook\_instance\_name     | Name of the Notebook dataengine will be linked to                                 |
 | gcp\_vpc\_name               | GCP VPC name                                                                      |
 | gcp\_subnet\_name            | GCP subnet name                                                                   |
@@ -1632,7 +1632,7 @@ List of parameters for dataengine cluster termination:
 | computational\_name          | Name of cluster                                                          |
 | notebook\_instance\_name     | Name of the Notebook instance which dataengine is linked to              |
 | azure\_region                | Azure region where infrastructure was deployed                           |
-| azure\_resource\_group\_name | Name of the resource group where all DLAb resources are being provisioned|
+| azure\_resource\_group\_name | Name of the resource group where all Data Lab resources are being provisioned|
 | action                       | Terminate                                                                |
 </details>
 
@@ -1726,7 +1726,7 @@ List of parameters for Dataengine node to **get list** of available libraries:
 | conf\_service\_base\_name     | Unique infrastructure value, specified during SSN deployment                      |
 | conf\_key\_name               | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name              | Value that previously was used when Edge being provisioned                        |
-| azure\_resource\_group\_name  | Name of the resource group where all DLAb resources are being provisioned         |
+| azure\_resource\_group\_name  | Name of the resource group where all Data Lab resources are being provisioned         |
 | computational\_id             | Name of cluster                                                                   |
 | application                   | Type of the notebook template (jupyter/rstudio/zeppelin/tensor/deeplearning)      |
 | action                        | lib_list                                                                          |
@@ -1739,7 +1739,7 @@ List of parameters for Dataengine node to **install** additional libraries:
 | conf\_service\_base\_name     | Unique infrastructure value, specified during SSN deployment                      |
 | conf\_key\_name               | Name of the uploaded SSH key file (without ".pem")                                |
 | edge\_user\_name              | Value that previously was used when Edge being provisioned                        |
-| azure\_resource\_group\_name  | Name of the resource group where all DLAb resources are being provisioned         |
+| azure\_resource\_group\_name  | Name of the resource group where all Data Lab resources are being provisioned         |
 | computational\_id             | Name of cluster                                                                   |
 | application                   | Type of the notebook template (jupyter/rstudio/zeppelin/tensor/deeplearning)      |
 | action                        | lib_install                                                                       |
@@ -1778,15 +1778,15 @@ List of parameters for Dataengine node to **install** additional libraries:
 
 ## Configuration files <a name="Configuration_files"></a>
 
-DLab configuration files are located on SSN node by following path:
--   /opt/dlab/conf ssn.yml – basic configuration for all java services;
+Data Lab configuration files are located on the SSN node under /opt/datalab/conf:
+-   ssn.yml – basic configuration for all java services;
 -   provisioning.yml – Provisioning Service configuration file;
 -   security.yml – Security Service configuration file;
 -   self-service.yml – Self-Service configuration file.
 
 ## Starting/Stopping services <a name="Starting_Stopping_services"></a>
 
-All DLab services running as OS services and have next syntax for
+All Data Lab services run as OS services and share the following syntax for
 starting and stopping:
 ```
 sudo supervisorctl {start | stop | status} [all | provserv | secserv | ui]
@@ -1799,16 +1799,16 @@ sudo supervisorctl {start | stop | status} [all | provserv | secserv | ui]
 -   secserv – execute command for Security Service;
 -   ui – execute command for Self-Service.
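
For example, to restart only the Self-Service after a configuration change (a sketch built from the command syntax and aliases above):
```
sudo supervisorctl stop ui
sudo supervisorctl start ui
sudo supervisorctl status all
```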
 
-## DLab Web UI <a name="DLab Web UI"></a>
+## Data Lab Web UI <a name="DLab_Web_UI"></a>
 
-DLab self service is listening to the secure 8443 port. This port is used for secure local communication with 
+Data Lab Self-Service listens on the secure port 8443. This port is used for secure local communication with 
 provisioning service.
 
 There is also an Nginx proxy server running on the Self-Service node, which proxies remote connections to the local 8443 port.
 Nginx listens on both ports 80 and 443 by default. This means that you can access the Self-Service Web UI using either 
 non-secure connections (port 80) or secure ones (port 443).
 
-Establishing connection using 443 port you should take into account that DLab uses self-signed certificate from the box, 
+When establishing a connection over port 443, take into account that Data Lab uses a self-signed certificate out of the box, 
 however you are free to switch Nginx to use your own domain-verified certificate.
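
For example, you can inspect the certificate currently served on the secure port with openssl (a sketch; SSN_HOSTNAME is a placeholder for your SSN node hostname):
```
openssl s_client -connect SSN_HOSTNAME:443 -showcerts </dev/null
```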
 
 To disable non-secure connection please do the following:
@@ -1848,13 +1848,13 @@ sudo supervisorctl start ui
 ```
 If you want to load report manually, or use external scheduler use following command:
 ```
-java -jar /opt/dlab/webapp/lib/billing/billing-aws.x.y.jar --conf /opt/dlab/conf/billing.yml
+java -jar /opt/datalab/webapp/lib/billing/billing-aws.x.y.jar --conf /opt/datalab/conf/billing.yml
 or
-java -cp /opt/dlab/webapp/lib/billing/billing-aws.x.y.jar com.epam.dlab.BillingTool --conf /opt/dlab/conf/billing.yml
+java -cp /opt/datalab/webapp/lib/billing/billing-aws.x.y.jar com.epam.datalab.BillingTool --conf /opt/datalab/conf/billing.yml
 ```
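
For example, the manual loader can be driven by cron (a sketch: the daily schedule and the log path are assumptions, and x.y stands for the actual jar version):
```
# hypothetical crontab entry: load the AWS billing report every day at 02:00
0 2 * * * java -jar /opt/datalab/webapp/lib/billing/billing-aws.x.y.jar --conf /opt/datalab/conf/billing.yml >> /var/opt/datalab/log/billing.log 2>&1
```
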
 If you want billing to work as a separate process from the Self-Service use following command:
 ```
-java -cp /opt/dlab/webapp/lib/billing/billing-aws.x.y.jar com.epam.dlab.BillingScheduler --conf /opt/dlab/conf/billing.yml
+java -cp /opt/datalab/webapp/lib/billing/billing-aws.x.y.jar com.epam.datalab.BillingScheduler --conf /opt/datalab/conf/billing.yml
 ```
 </details>
 
@@ -1867,24 +1867,24 @@ Billing module is implemented as a separate jar file and can be running in the f
 
 If you want to start billing module as a separate process use the following command:
 ```
-java -jar /opt/dlab/webapp/lib/billing/billing-azure.x.y.jar /opt/dlab/conf/billing.yml
+java -jar /opt/datalab/webapp/lib/billing/billing-azure.x.y.jar /opt/datalab/conf/billing.yml
 ```
 </details>
 
 ## Backup and Restore <a name="Backup_and_Restore"></a>
 
-All DLab configuration files, keys, certificates, jars, database and logs can be saved to backup file.
+All Data Lab configuration files, keys, certificates, jars, database and logs can be saved to a backup file.
 
-Scripts for backup and restore is located in ```dlab_path/tmp/```. Default: ```/opt/dlab/tmp/```
+Scripts for backup and restore are located in ```datalab_path/tmp/```. Default: ```/opt/datalab/tmp/```
 
 List of parameters for run backup:
 
 | Parameter      | Description/Value                                                                                                       |
 |----------------|-------------------------------------------------------------------------------------------------------------------------|
-| --dlab\_path   | Path to DLab. Default: /opt/dlab/                                                                                       |
+| --datalab\_path   | Path to Data Lab. Default: /opt/datalab/                                                                                       |
 | --configs      | Comma separated names of config files, like "security.yml", etc. Default: all                                           |
 | --keys         | Comma separated names of keys, like "user_name.pub". Default: all                                                       |
-| --certs        | Comma separated names of SSL certificates and keys, like "dlab.crt", etc. Also available: skip. Default: all |
+| --certs        | Comma separated names of SSL certificates and keys, like "datalab.crt", etc. Also available: skip. Default: all |
 | --jars         | Comma separated names of jar application, like "self-service" (without .jar), etc. Also available: all. Default: skip   |
 | --db           | Mongo DB. Key without arguments. Default: disable                                                                       |
 | --logs         | All logs (include docker). Key without arguments. Default: disable                                                      |
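
A hedged example of a backup run (the script name backup.py is an assumption; only the /opt/datalab/tmp/ location is given above):
```
# hypothetical invocation: back up all configs, keys and certificates plus the Mongo DB and logs
/usr/bin/python /opt/datalab/tmp/backup.py --configs all --keys all --certs all --db --logs
```
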
@@ -1893,10 +1893,10 @@ List of parameters for run restore:
 
 | Parameter      | Description/Value                                                                                                       |
 |----------------|-------------------------------------------------------------------------------------------------------------------------|
-| --dlab\_path   | Path to DLab. Default: /opt/dlab/                                                                                       |
+| --datalab\_path   | Path to Data Lab. Default: /opt/datalab/                                                                                       |
 | --configs      | Comma separated names of config files, like "security.yml", etc. Default: all                                           |
 | --keys         | Comma separated names of keys, like "user_name.pub". Default: all                                                       |
-| --certs        | Comma separated names of SSL certificates and keys, like "dlab.crt", etc. Also available: skip. Default: all |
+| --certs        | Comma separated names of SSL certificates and keys, like "datalab.crt", etc. Also available: skip. Default: all |
 | --jars         | Comma separated names of jar application, like "self-service" (without .jar), etc. Also available: all. Default: skip   |
 | --db           | Mongo DB. Key without arguments. Default: disable                                                                       |
 | --file         | Full or relative path to backup file or folder. Required field                                                          |
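
A matching restore sketch (again, the script name restore.py and the archive name are assumptions; --file is the only required parameter per the table above):
```
# hypothetical invocation: restore configs and keys from a previously created backup
/usr/bin/python /opt/datalab/tmp/restore.py --configs all --keys all --file /opt/datalab/tmp/backup.tar.gz
```
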
@@ -1910,13 +1910,13 @@ List of parameters for run restore:
 
 Your own GitLab server can be deployed from the SSN node with a script, which is located in:
 
-```dlab_path/tmp/gitlab```. Default: ```/opt/dlab/tmp/gitlab```
+```datalab_path/tmp/gitlab```. Default: ```/opt/datalab/tmp/gitlab```
 
 All initial configuration parameters are located in the ```gitlab.ini``` file.
 
 Some of the parameters are already set up during SSN provisioning.
 
-GitLab uses the same LDAP server as DLab.
+GitLab uses the same LDAP server as Data Lab.
 
 To deploy the GitLab server, set all needed parameters in ```gitlab.ini``` and run the script:
 
@@ -1928,18 +1928,18 @@ To deploy Gitlab server, set all needed parameters in ```gitlab.ini``` and run s
 
 ## Troubleshooting <a name="Troubleshooting"></a>
 
-If the parameter dlab\_path of configuration file dlab.ini wasn’t changed, the path to DLab service would default to:
+If the datalab\_path parameter of the datalab.ini configuration file wasn’t changed, the Data Lab service paths default to:
 
--   /opt/dlab/ - main directory of DLab service
--   /var/opt/dlab/log/ or /var/log/dlab/ - path to log files
+-   /opt/datalab/ - main directory of Data Lab service
+-   /var/opt/datalab/log/ or /var/log/datalab/ - path to log files
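
For example, to look at recent service logs (a sketch; the concrete file names under these directories vary by deployment):
```
ls /var/opt/datalab/log/
tail -f /var/opt/datalab/log/ssn/ssn.log   # hypothetical file name
```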
 
 To check logs of Docker containers run the following commands:
 ```
 docker ps -a – to get the list of containers that were executed.
 ...
-a85d0d3c27aa docker.dlab-dataengine:latest "/root/entrypoint...." 2 hours ago Exited (0) 2 hours ago infallible_gallileo
-6bc2afeb888e docker.dlab-jupyter:latest "/root/entrypoint...." 2 hours ago Exited (0) 2 hours ago practical_cori
-51b71c5d4aa3 docker.dlab-zeppelin:latest "/root/entrypoint...." 2 hours ago Exited (0) 2 hours ago determined_knuth
+a85d0d3c27aa docker.datalab-dataengine:latest "/root/entrypoint...." 2 hours ago Exited (0) 2 hours ago infallible_gallileo
+6bc2afeb888e docker.datalab-jupyter:latest "/root/entrypoint...." 2 hours ago Exited (0) 2 hours ago practical_cori
+51b71c5d4aa3 docker.datalab-zeppelin:latest "/root/entrypoint...." 2 hours ago Exited (0) 2 hours ago determined_knuth
 ...
 docker logs <container_id> – to get the log for a particular Docker container.
 ```
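
For instance, using a container ID from the sample listing above:
```
docker logs 6bc2afeb888e   # prints the log of the docker.datalab-jupyter run shown above
```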
@@ -1956,32 +1956,32 @@ docker-build <notebook_name> #to rebuild certain images
 You can also rebuild images manually by executing the following steps:
 
 1.  SSH to SSN instance
-2.  go to */opt/dlab/sources/*
+2.  go to */opt/datalab/sources/*
 3.  Modify needed files
 4.  [ONLY FOR AZURE] Copy the service principal json file with credentials to base/azure_auth.json
 5.  Rebuild proper Docker images, using one or several commands (depending on what files you’ve changed):
 ```
-docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/base_Dockerfile -t docker.dlab-base .
-docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/edge_Dockerfile -t docker.dlab-edge .
-docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/jupyter_Dockerfile -t docker.dlab-jupyter .
-docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/jupyterlab_Dockerfile -t docker.dlab-jupyterlab .
-docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/rstudio_Dockerfile -t docker.dlab-rstudio .
-docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/zeppelin_Dockerfile -t docker.dlab-zeppelin .
-docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/tensor_Dockerfile -t docker.dlab-tensor .
-docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/tensor-rstudio_Dockerfile -t docker.dlab-tensor-rstudio .
-docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/deeplearning_Dockerfile -t docker.dlab-deeplearning .
-docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/dataengine_Dockerfile -t docker.dlab-dataengine .
+docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/base_Dockerfile -t docker.datalab-base .
+docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/edge_Dockerfile -t docker.datalab-edge .
+docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/jupyter_Dockerfile -t docker.datalab-jupyter .
+docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/jupyterlab_Dockerfile -t docker.datalab-jupyterlab .
+docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/rstudio_Dockerfile -t docker.datalab-rstudio .
+docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/zeppelin_Dockerfile -t docker.datalab-zeppelin .
+docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/tensor_Dockerfile -t docker.datalab-tensor .
+docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/tensor-rstudio_Dockerfile -t docker.datalab-tensor-rstudio .
+docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/deeplearning_Dockerfile -t docker.datalab-deeplearning .
+docker build --build-arg OS=<os_family> --file general/files/<cloud_provider>/dataengine_Dockerfile -t docker.datalab-dataengine .
 ```
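
For instance, to rebuild only the Jupyter image on a Debian-based AWS deployment (a sketch; the exact build context under /opt/datalab/sources/ is an assumption):
```
cd /opt/datalab/sources/infrastructure-provisioning/src
docker build --build-arg OS=debian --file general/files/aws/jupyter_Dockerfile -t docker.datalab-jupyter .
```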
 
 ----------------
 # Development <a name="Development"></a>
 
-DLab services could be ran in development mode. This mode emulates real work an does not create any resources on cloud 
+Data Lab services can be run in development mode. This mode emulates real work and does not create any resources on the cloud 
 provider environment.
 
 ## Folder structure <a name="Folder_structure"></a>
 
-    dlab
+    datalab
     ├───infrastructure-provisioning
     └───services
         ├───billing
@@ -2002,7 +2002,7 @@ provider environment.
 
 ## Pre-requisites <a name="Pre-requisites"></a>
 
-In order to start development of Front-end Web UI part of DLab - Git repository should be cloned and the following 
+In order to start development of the front-end Web UI part of Data Lab, the Git repository should be cloned and the following 
 packages should be installed:
 
 -   Git 1.7 or higher
@@ -2058,7 +2058,7 @@ edge, etc.
 | EdgeResource              | Create<br>Start<br>Stop                 | Provides Docker actions for EDGE node management.                            |
 | ExploratoryResource       | Create<br>Start<br>Stop<br>Terminate    | Provides Docker actions for working with exploratory environment management. |
 | GitExploratoryResource    | Update git creds                        | Docker actions to provision git credentials to running notebooks             |
-| InfrastructureResource    | Status                                  | Docker action for obtaining status of DLab infrastructure instances.         |
+| InfrastructureResource    | Status                                  | Docker action for obtaining status of Data Lab infrastructure instances.         |
 | LibExploratoryResource    | Lib list<br>Install lib                 | Docker actions to install libraries on notebooks                             |
 
 Some class names may have endings like Aws or Azure (e.g. ComputationalResourceAws, ComputationalResourceAzure, etc.). 
@@ -2072,13 +2072,13 @@ LDAP only provides with authentication end point that allows to verify authentic
 If you use the AWS cloud provider, LDAP + AWS authentication could be useful, as it allows you to combine LDAP authentication 
 with verification that the user has any role in the AWS account.
 
-DLab provides OAuth2(client credentials and authorization code flow) security authorization mechanism for Azure users. 
+Data Lab provides an OAuth2 (client credentials and authorization code flow) security authorization mechanism for Azure users. 
 This kind of authentication is required when you are going to use Data Lake. If Data Lake is not enabled, you have two 
 options: LDAP or OAuth2.
 If OAuth2 is in use, security-service validates the user's permissions against the configured permission scope (resource in Azure).
-If Data Lake is enabled default permission scope(can be configured manually after deploy DLab) is Data Lake Store 
+If Data Lake is enabled, the default permission scope (which can be configured manually after deploying Data Lab) is the Data Lake Store 
 account, so a user will be allowed to log in only if he/she has a role in scope of the Data Lake Store Account resource.
-If Data Lake is disabled but Azure OAuth2 is in use default permission scope will be Resource Group where DLab is 
+If Data Lake is disabled but Azure OAuth2 is in use, the default permission scope will be the Resource Group where Data Lab is 
 created, and only users who have any roles in the resource group will be allowed to log in.
 
 
@@ -2088,7 +2088,7 @@ created and only users who have any roles in the resource group will be allowed
 
 Web UI sources are part of Self-Service.
 
-Sources are located in dlab/services/self-service/src/main/resources/webapp
+Sources are located in datalab/services/self-service/src/main/resources/webapp
 
 | Main pages                    | Components and Services |
 |-------------------------------|-------------------------|
@@ -2131,13 +2131,13 @@ db.createUser(
 )
 ```
 
-  * Load collections form file dlab/services/settings/(aws|azure)/mongo_settings.json
+  * Load collections from file datalab/services/settings/(aws|azure)/mongo_settings.json
 
 ```
 mongoimport -u admin -p <password> -d <database_name> -c settings mongo_settings.json
 ```
 
-  * Load collections form file dlab/infrastructure-provisioning/src/ssn/files/mongo_roles.json
+  * Load collections from file datalab/infrastructure-provisioning/src/ssn/files/mongo_roles.json
 
 ```
 mongoimport -u admin -p <password> -d <database_name> --jsonArray -c roles mongo_roles.json
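# To sanity-check the import, count the loaded documents (a sketch using the same credentials as above):
mongo -u admin -p <password> <database_name> --eval "db.roles.count()"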
@@ -2146,7 +2146,7 @@ mongoimport -u admin -p <password> -d <database_name> --jsonArray -c roles mongo
 ### Setting up environment options
 
   * Set option CLOUD_TYPE to aws/azure, DEV\_MODE to **true**, mongo database name and password in configuration file 
-  dlab/infrastructure-provisioning/src/ssn/templates/ssn.yml
+  datalab/infrastructure-provisioning/src/ssn/templates/ssn.yml
 
 ```
 <#assign CLOUD_TYPE="aws">
@@ -2158,7 +2158,7 @@ mongo:
   password: <password>
 ```
 
-  * Add system environment variable DLAB\_CONF\_DIR=&lt;dlab\_root\_folder&gt;/dlab/infrastructure-provisioning/src/ssn/templates/ssn.yml or create two symlinks in dlab/services/provisioning-service and dlab/services/self-service folders for file dlab/infrastructure-provisioning/src/ssn/templates/ssn.yml.
+  * Add system environment variable DATA_LAB\_CONF\_DIR=&lt;datalab\_root\_folder&gt;/datalab/infrastructure-provisioning/src/ssn/templates/ssn.yml or create two symlinks in datalab/services/provisioning-service and datalab/services/self-service folders for file datalab/infrastructure-provisioning/src/ssn/templates/ssn.yml.
 
 *Unix*
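
A sketch of the symlink commands, mirroring the Windows mklink example below:
```
cd datalab/services/provisioning-service && ln -s ../../infrastructure-provisioning/src/ssn/templates/ssn.yml ssn.yml
cd ../self-service && ln -s ../../infrastructure-provisioning/src/ssn/templates/ssn.yml ssn.yml
```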
 
@@ -2176,7 +2176,7 @@ mklink ssn.yml ..\\..\\infrastructure-provisioning\\src\\ssn\\templates\\ssn.yml
 
 ```
 /var/opt/datalab/log/ssn
-/opt/dlab/tmp/result
+/opt/datalab/tmp/result
 ```
 
 ### Install Node.js
@@ -2192,12 +2192,12 @@ npm install npm@latest -g
 
 ### Build Web UI components
 
-  * Change folder to dlab/services/self-service/src/main/resources/webapp and install the dependencies from a package.json manifest
+  * Change folder to datalab/services/self-service/src/main/resources/webapp and install the dependencies from a package.json manifest
 
 ```
 npm install
 ```
-  * Replace CLOUD_PROVIDER options with aws|azure in dictionary file<br> dlab/services/self-service/src/main/resources/webapp/src/dictionary/global.dictionary.ts
+  * Replace CLOUD_PROVIDER options with aws|azure in dictionary file<br> datalab/services/self-service/src/main/resources/webapp/src/dictionary/global.dictionary.ts
 
 ```
 import { NAMING_CONVENTION } from './(aws|azure).dictionary';
@@ -2230,24 +2230,24 @@ Please find below set of commands to create certificate, depending on OS.
 
 Pay attention that the last command has to be executed with administrative permissions.
 ```
-keytool -genkeypair -alias dlab -keyalg RSA -storepass KEYSTORE_PASSWORD -keypass KEYSTORE_PASSWORD -keystore ~/keys/dlab.keystore.jks -keysize 2048 -dname "CN=localhost"
-keytool -exportcert -alias dlab -storepass KEYSTORE_PASSWORD -file ~/keys/dlab.crt -keystore ~/keys/dlab.keystore.jks
-sudo keytool -importcert -trustcacerts -alias dlab -file ~/keys/dlab.crt -noprompt -storepass changeit -keystore ${JRE_HOME}/lib/security/cacerts
+keytool -genkeypair -alias datalab -keyalg RSA -storepass KEYSTORE_PASSWORD -keypass KEYSTORE_PASSWORD -keystore ~/keys/datalab.keystore.jks -keysize 2048 -dname "CN=localhost"
+keytool -exportcert -alias datalab -storepass KEYSTORE_PASSWORD -file ~/keys/datalab.crt -keystore ~/keys/datalab.keystore.jks
+sudo keytool -importcert -trustcacerts -alias datalab -file ~/keys/datalab.crt -noprompt -storepass changeit -keystore ${JRE_HOME}/lib/security/cacerts
 ```
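
The verification commands from the Windows notes below work on Linux as well, e.g.:
```
keytool -list -alias datalab -storepass changeit -keystore ${JRE_HOME}/lib/security/cacerts
```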
 #### Create Windows server certificate
 
 Pay attention that the last command has to be executed with administrative permissions.
 To achieve this the command line (cmd) should be ran with administrative permissions.  
 ```
-"%JRE_HOME%\bin\keytool" -genkeypair -alias dlab -keyalg RSA -storepass KEYSTORE_PASSWORD -keypass KEYSTORE_PASSWORD -keystore <DRIVE_LETTER>:\home\%USERNAME%\keys\dlab.keystore.jks -keysize 2048 -dname "CN=localhost"
-"%JRE_HOME%\bin\keytool" -exportcert -alias dlab -storepass KEYSTORE_PASSWORD -file <DRIVE_LETTER>:\home\%USERNAME%\keys\dlab.crt -keystore <DRIVE_LETTER>:\home\%USERNAME%\keys\dlab.keystore.jks
-"%JRE_HOME%\bin\keytool" -importcert -trustcacerts -alias dlab -file <DRIVE_LETTER>:\home\%USERNAME%\keys\dlab.crt -noprompt -storepass changeit -keystore "%JRE_HOME%\lib\security\cacerts"
+"%JRE_HOME%\bin\keytool" -genkeypair -alias datalab -keyalg RSA -storepass KEYSTORE_PASSWORD -keypass KEYSTORE_PASSWORD -keystore <DRIVE_LETTER>:\home\%USERNAME%\keys\datalab.keystore.jks -keysize 2048 -dname "CN=localhost"
+"%JRE_HOME%\bin\keytool" -exportcert -alias datalab -storepass KEYSTORE_PASSWORD -file <DRIVE_LETTER>:\home\%USERNAME%\keys\datalab.crt -keystore <DRIVE_LETTER>:\home\%USERNAME%\keys\datalab.keystore.jks
+"%JRE_HOME%\bin\keytool" -importcert -trustcacerts -alias datalab -file <DRIVE_LETTER>:\home\%USERNAME%\keys\datalab.crt -noprompt -storepass changeit -keystore "%JRE_HOME%\lib\security\cacerts"
 
 Useful commands
-"%JRE_HOME%\bin\keytool" -list -alias dlab -storepass changeit -keystore "%JRE_HOME%\lib\security\cacerts"
-"%JRE_HOME%\bin\keytool" -delete -alias dlab -storepass changeit -keystore "%JRE_HOME%\lib\security\cacerts"
+"%JRE_HOME%\bin\keytool" -list -alias datalab -storepass changeit -keystore "%JRE_HOME%\lib\security\cacerts"
+"%JRE_HOME%\bin\keytool" -delete -alias datalab -storepass changeit -keystore "%JRE_HOME%\lib\security\cacerts"
 ```
-Where the ```<DRIVE_LETTER>``` must be the drive letter where you run the DLab.
+Where ```<DRIVE_LETTER>``` is the drive letter where you run Data Lab.
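
If it helps to sanity-check the import, here is a minimal sketch (assumptions: Python 3, keytool on PATH, JRE_HOME set) that wraps the same keytool -list command shown above:

```python
# Sketch: check whether the "datalab" alias was imported into the JRE
# trust store; wraps the keytool -list command from the listing above.
import os
import subprocess

cacerts = os.path.join(os.environ['JRE_HOME'], 'lib', 'security', 'cacerts')
result = subprocess.run(
    ['keytool', '-list', '-alias', 'datalab',
     '-storepass', 'changeit', '-keystore', cacerts],
    capture_output=True, text=True)
print('certificate found' if result.returncode == 0
      else 'certificate missing: ' + result.stderr.strip())
```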
 
 
 ## How to run locally <a name="run_locally"></a>
@@ -2256,7 +2256,7 @@ There is a possibility to run Self-Service and Provisioning Service locally. All
 Docker are mocked and instance creation status will be persisted to Mongo (without any real impact on Docker and AWS). 
 Security Service can't run on a local machine because of the complexity of mocking LDAP locally.
 
-Both services, Self-Service and Provisioning Service are dependent on dlab/provisioning-infrastructure/ssn/templates/ssn.yml
+Both services, Self-Service and Provisioning Service are dependent on datalab/provisioning-infrastructure/ssn/templates/ssn.yml
 configuration file. Both services have main functions as entry point, SelfServiceApplication for Self-Service and ProvisioningServiceApplication for Provisioning Service. Services could be started by running main methods of these classes. Both main functions require two arguments:
 
   * Run mode (“server”)
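
For illustration only, a minimal sketch of starting both services from packaged jars (the jar names are hypothetical; the second argument is the path to the ssn.yml configuration mentioned above):

```python
# Sketch: launch Self-Service and Provisioning Service locally.
# Jar names are hypothetical; each service takes the run mode
# ("server") and the path to the ssn.yml configuration file.
import subprocess

SSN_YML = 'infrastructure-provisioning/src/ssn/templates/ssn.yml'
for jar in ('self-service.jar', 'provisioning-service.jar'):
    subprocess.Popen(['java', '-jar', jar, 'server', SSN_YML])
```
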
@@ -2282,11 +2282,11 @@ Password: <any>
 
 ### DevOps components overview
 
-The following list shows common structure of scripts for deploying DLab
+The following list shows common structure of scripts for deploying Data Lab
 
 #### Folder structure
 
-    dlab
+    datalab
     └───infrastructure-provisioning
         └───src
             ├───base
@@ -2321,7 +2321,7 @@ OS, CLOUD dependent and common for few templates scripts, functions, files are l
 
     general
     ├───api – all available API
-    ├───conf – DLab configuration
+    ├───conf – Data Lab configuration
     ├───files – OS/Cloud dependent files
     ├───lib – OS/Cloud dependent functions
     ├───scripts – OS/Cloud dependent Python scripts
@@ -2363,20 +2363,20 @@ Available Docker images and their actions:
 
 ##### Docker and Python execution workflow on the example of the SSN node
 
--   Docker command for building images *docker.dlab-base* and *docker.dlab-ssn*:
+-   Docker command for building images *docker.datalab-base* and *docker.datalab-ssn*:
 ```
-sudo docker build --build-arg OS=debian  --file general/files/aws/base_Dockerfile -t docker.dlab-base . ;
-sudo docker build --build-arg OS=debian  --file general/files/aws/ssn_Dockerfile -t docker.dlab-ssn . ;
+sudo docker build --build-arg OS=debian  --file general/files/aws/base_Dockerfile -t docker.datalab-base . ;
+sudo docker build --build-arg OS=debian  --file general/files/aws/ssn_Dockerfile -t docker.datalab-ssn . ;
 ```
 Example of SSN Docker file:
 ```
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY ssn/ /root/
 COPY general/scripts/aws/ssn_* /root/scripts/
-COPY general/lib/os/${OS}/ssn_lib.py /usr/lib/python2.7/dlab/ssn_lib.py
+COPY general/lib/os/${OS}/ssn_lib.py /usr/lib/python2.7/datalab/ssn_lib.py
 COPY general/files/aws/ssn_policy.json /root/files/
 COPY general/templates/aws/jenkins_jobs /root/templates/jenkins_jobs
 
@@ -2390,7 +2390,7 @@ Using this Docker file, all required scripts and files will be copied to Docker
 
 -   Docker command for building SSN:
 ```
-docker run -i -v /root/KEYNAME.pem:/root/keys/KEYNAME.pem –v /web_app:/root/web_app -e "conf_os_family=debian" -e "conf_cloud_provider=aws" -e "conf_resource=ssn" -e "aws_ssn_instance_size=t2.medium" -e "aws_region=us-west-2" -e "aws_vpc_id=vpc-111111" -e "aws_subnet_id=subnet-111111" -e "aws_security_groups_ids=sg-11111,sg-22222,sg-33333" -e "conf_key_name=KEYNAME" -e "conf_service_base_name=dlab_test" -e "aws_access_key=Access_Key_ID" -e "aws_secret_access_key=Secret_Access_Key" -e "co [...]
+docker run -i -v /root/KEYNAME.pem:/root/keys/KEYNAME.pem -v /web_app:/root/web_app -e "conf_os_family=debian" -e "conf_cloud_provider=aws" -e "conf_resource=ssn" -e "aws_ssn_instance_size=t2.medium" -e "aws_region=us-west-2" -e "aws_vpc_id=vpc-111111" -e "aws_subnet_id=subnet-111111" -e "aws_security_groups_ids=sg-11111,sg-22222,sg-33333" -e "conf_key_name=KEYNAME" -e "conf_service_base_name=datalab_test" -e "aws_access_key=Access_Key_ID" -e "aws_secret_access_key=Secret_Access_Key" -e  [...]
 ```
 
 -   Docker executes *entrypoint.py* script with action *create*. *Entrypoint.py* will set environment variables, 
@@ -2431,7 +2431,7 @@ docker run -i -v /root/KEYNAME.pem:/root/keys/KEYNAME.pem –v /web_app:/root/we
     1. Installing prerequisites
     2. Installing required packages
     3. Configuring Docker
-    4. Configuring DLab Web UI
+    4. Configuring Data Lab Web UI
 
-   If all scripts/functions are executed successfully, the Docker container will stop and the SSN node will be created.
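
To make the entrypoint dispatch described above concrete, a rough sketch of how such a dispatcher might look (names and the fab invocation are illustrative, not the actual entrypoint.py):

```python
# Sketch: an entrypoint-style dispatcher. "docker run -e" variables
# select the resource, --action selects the fabric task to execute;
# environment variables are inherited by the fabric task.
import os
import subprocess
import sys

resource = os.environ.get('conf_resource', 'ssn')   # e.g. ssn, edge
action = sys.argv[sys.argv.index('--action') + 1]   # e.g. create
print('Running action "{}" for resource "{}"'.format(action, resource))
subprocess.check_call(['fab', '-f', '/root/fabfile.py', action])
```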
 
@@ -2440,13 +2440,13 @@ docker run -i -v /root/KEYNAME.pem:/root/keys/KEYNAME.pem –v /web_app:/root/we
 SSN:
 
 ```
-docker run -i -v <key_path><key_name>.pem:/root/keys/<key_name>.pem -e "region=<region>" -e "conf_service_base_name=<Infrastructure_Tag>" -e  “conf_resource=ssn" -e "aws_access_key=<Access_Key_ID>" -e "aws_secret_access_key=<Secret_Access_Key>" docker.dlab-ssn --action <action>
+docker run -i -v <key_path><key_name>.pem:/root/keys/<key_name>.pem -e "region=<region>" -e "conf_service_base_name=<Infrastructure_Tag>" -e "conf_resource=ssn" -e "aws_access_key=<Access_Key_ID>" -e "aws_secret_access_key=<Secret_Access_Key>" docker.datalab-ssn --action <action>
 ```
 All parameters are listed in the "Self-ServiceNode" chapter.
 
 Other images:
 ```
-docker run -i -v /home/<user>/keys:/root/keys  -v /opt/dlab/tmp/result:/response -v /var/opt/dlab/log/<image>:/logs/<image>  -e <variable1> –e <variable2> docker.dlab-<image> --action <action>
+docker run -i -v /home/<user>/keys:/root/keys  -v /opt/datalab/tmp/result:/response -v /var/opt/datalab/log/<image>:/logs/<image>  -e <variable1> -e <variable2> docker.datalab-<image> --action <action>
 ```
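
A minimal sketch of assembling such a docker run command programmatically, mirroring the pattern the deploy script (shown later in this commit) uses; the image name and all values are placeholders:

```python
# Sketch: build a "docker run" command for a datalab image from a dict
# of environment variables; image name and values are placeholders.
image = 'docker.datalab-jupyter'
env = {'conf_resource': 'notebook', 'conf_key_name': 'KEYNAME'}
cmd = ['docker', 'run', '-i',
       '-v', '/home/user/keys:/root/keys',
       '-v', '/opt/datalab/tmp/result:/response']
for name, value in env.items():
    cmd += ['-e', '{}={}'.format(name, value)]
cmd += [image, '--action', 'create']
print(' '.join(cmd))
```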
 
 #### How to add a new template
@@ -2559,7 +2559,7 @@ Other scripts, responsible for configuring Jupyter node are placed in *infrastru
 
 -   *infrastructure-provisioning/src/general/files/<cloud_provider>/my-tool_Dockerfile* – used for building template 
     Docker image and describes which files, scripts, templates are required and will be copied to template Docker image.
--   *infrastructure-provisioning/src/general/files/<cloud_provider>/my-tool_descriptsion.json* – JSON file for DLab Web 
+-   *infrastructure-provisioning/src/general/files/<cloud_provider>/my-tool_description.json* – JSON file for Data Lab Web 
     UI. In this file you can specify:
   * exploratory\_environment\_shapes – list of EC2 shapes
   * exploratory\_environment\_versions – description of template
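
For orientation, a minimal sketch that generates such a description file; the two top-level keys come from the list above, while the inner structure and values are placeholders (compare with the existing <tool>_description.json files in the repository):

```python
# Sketch: write a minimal my-tool_description.json; the two top-level
# keys are from the list above, everything inside is a placeholder.
import json

description = {
    'exploratory_environment_shapes': {
        'For testing': [{'Size': 'S', 'Type': 't2.medium'}]
    },
    'exploratory_environment_versions': [
        {'template_name': 'My tool', 'version': 'my-tool-1.0'}
    ]
}
with open('my-tool_description.json', 'w') as f:
    json.dump(description, f, indent=2)
```
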
@@ -2742,9 +2742,9 @@ User`s authentication into LDAP would be done for DN with following template **l
 dc=alexion,dc=cloud'**, where CN is the attribute retrieved by the **“userLookUp”** script.
 
 ## Azure OAuth2 Authentication <a name="Azure_OAuth2_Authentication"></a>
-DLab supports OAuth2 authentication that is configured automatically in Security Service and Self Service after DLab deployment.
+Data Lab supports OAuth2 authentication that is configured automatically in Security Service and Self Service after Data Lab deployment.
 Please see the detailed explanation of the configuration parameters for Self Service and Security Service below.
-DLab supports client credentials(username + password) and authorization code flow for authentication.
+Data Lab supports client credentials (username + password) and the authorization code flow for authentication.
 
 
 ### Azure OAuth2 Self Service configuration
@@ -2767,12 +2767,12 @@ where:
 - **tenant** - tenant id of your company
 - **authority** - Microsoft login endpoint
 - **clientId** - id of the application that users log in through
-- **redirectUrl** - redirect URL to DLab application after try to login to Azure using OAuth2
-- **responseMode** - defines how Azure sends authorization code or error information to DLab during log in procedure
+- **redirectUrl** - redirect URL to the Data Lab application after an attempt to log in to Azure using OAuth2
+- **responseMode** - defines how Azure sends the authorization code or error information to Data Lab during the login procedure
 - **prompt** - defines the kind of prompt during OAuth2 login
-- **silent** - defines if DLab tries to log in user without interaction(true/false), if false DLab tries to login user 
+- **silent** - defines if Data Lab tries to log the user in without interaction (true/false); if false, Data Lab tries to log the user in 
  with the configured prompt
-- **loginPage** - start page of DLab application
+- **loginPage** - start page of Data Lab application
 - **maxSessionDurabilityMilliseconds** - max user session durability. The user will be asked to log in after this period 
  of time and when he/she creates or starts a notebook/cluster. This operation is needed to update the refresh_token that is used by notebooks to access Data Lake Store
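
To make the parameter list concrete, a minimal sketch of these settings as a Python dict; every value is a placeholder, and the real values belong in the Self Service configuration:

```python
# Sketch: the Azure OAuth2 login parameters described above, with
# placeholder values only.
azure_login = {
    'tenant': '00000000-0000-0000-0000-000000000000',
    'authority': 'https://login.microsoftonline.com/',
    'clientId': '11111111-1111-1111-1111-111111111111',
    'redirectUrl': 'https://datalab.example.com/',
    'responseMode': 'query',
    'prompt': 'consent',
    'silent': True,
    'loginPage': 'https://datalab.example.com/',
    'maxSessionDurabilityMilliseconds': 28800000,  # 8 hours
}
```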
 
@@ -2798,7 +2798,7 @@ where:
 - **tenant** - tenant id of your company
 - **authority** - Microsoft login endpoint
 - **clientId** - id of the application that users log in through
-- **redirectUrl** - redirect URL to DLab application after try to login to Azure using OAuth2
+- **redirectUrl** - redirect URL to the Data Lab application after an attempt to log in to Azure using OAuth2
 - **validatePermissionScope** - defines (true/false) if the user's permissions should be validated against the resource that is 
   provided in the permissionScope parameter. The user will be logged in only if he/she has any role in the resource IAM 
   described by the permissionScope parameter
diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md
index cf4e3a3..cdb17ca 100644
--- a/RELEASE_NOTES.md
+++ b/RELEASE_NOTES.md
@@ -1,21 +1,21 @@
-# DLab is Self-service, Fail-safe Exploratory Environment for Collaborative Data Science Workflow
+# Data Lab is Self-service, Fail-safe Exploratory Environment for Collaborative Data Science Workflow
 
 ## New features in v2.2
 **All Cloud platforms:**
-- added concept of Projects into DLab. Now users can unite under Projects and collaborate
-- for ease of use we've added web terminal for all DLab Notebooks
+- added concept of Projects into Data Lab. Now users can unite under Projects and collaborate
+- for ease of use we've added a web terminal for all Data Lab Notebooks
 - updated versions of installed software:
 	* angular 8.2.7
 
 **GCP:**
+- added a billing report to Data Lab to monitor Cloud resource usage, including the ability to manage billing quotas
+- added billing report to monitor Cloud resources usage into Data Lab, including ability to manage billing quotas
 - updated versions of installed software:
 	* Dataproc 1.3
 
 ## Improvements in v2.2
 **All Cloud platforms:**
 - implemented login via KeyCloak to support integration with multiple SAML and OAUTH2 identity providers
-- added DLab version into WebUI
+- added Data Lab version into WebUI
 - augmented ‘Environment management’ page
 - added possibility to tag Notebook from UI
 - added possibility to terminate computational resources via scheduler
@@ -41,12 +41,12 @@
 - Notebook name should be unique per project for different users; otherwise it is impossible to operate a Notebook with the same name after the first instance creation
 
 **Microsoft Azure:**
-- DLab deployment  is unavailable if Data Lake is enabled
+- Data Lab deployment is unavailable if Data Lake is enabled
 - custom image creation from Notebook fails and deletes the existing Notebook
 
 **Refer to the following link in order to view the other major/minor issues in v2.2:**
 
-[Apache DLab: known issues](https://issues.apache.org/jira/issues/?filter=12347602 "Apache DLab: known issues")
+[Apache Data Lab: known issues](https://issues.apache.org/jira/issues/?filter=12347602 "Apache Data Lab: known issues")
 
 ## Known issues caused by cloud provider limitations in v2.2
 **Microsoft Azure:**
@@ -57,4 +57,4 @@
 **GCP:**
 - resource name length should not exceed 64 chars
 - billing data is not available
-- **NOTE:** DLab has not been tested on GCP for Red Hat Enterprise Linux
+- **NOTE:** Data Lab has not been tested on GCP for Red Hat Enterprise Linux
diff --git a/USER_GUIDE.md b/USER_GUIDE.md
index 46e5847..f5e12ec 100644
--- a/USER_GUIDE.md
+++ b/USER_GUIDE.md
@@ -1,7 +1,7 @@
-What is DLAB?
+What is Data Lab?
 =============
 
-DLab is an essential toolset for analytics. It is a self-service Web Console, used to create and manage exploratory environments. It allows teams to spin up analytical environments with best of breed open-source tools just with a single click of the mouse. Once established, environment can be managed by an analytical team itself, leveraging simple and easy-to-use Web Interface.
+Data Lab is an essential toolset for analytics. It is a self-service Web Console, used to create and manage exploratory environments. It allows teams to spin up analytical environments with best-of-breed open-source tools with just a single click of the mouse. Once established, the environment can be managed by the analytical team itself, leveraging a simple and easy-to-use Web Interface.
 <p>See more at <a href="http://dlab.opensource.epam.com/" rel="nofollow">dlab.opensource.epam.com</a>.</p>
 
 ------------
@@ -50,11 +50,11 @@ DLab is an essential toolset for analytics. It is a self-service Web Console, us
 
 &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; [Multiple Cloud endpoints](#multiple_cloud_endpoints)
 
-&nbsp; &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; [Manage DLab quotas](#manage_dlab_quotas)
+&nbsp; &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; [Manage Data Lab quotas](#manage_dlab_quotas)
 
-[DLab billing report](#billing_page)
+[Data Lab billing report](#billing_page)
 
-[DLab audit report](#audit_page)
+[Data Lab audit report](#audit_page)
 
 [Web UI filters](#filter)
 
@@ -62,9 +62,9 @@ DLab is an essential toolset for analytics. It is a self-service Web Console, us
 ---------
 # Login <a name="login"></a>
 
-As soon as DLab is deployed by an infrastructure provisioning team and you received DLab URL, your username and password – open DLab login page, fill in your credentials and hit Login.
+As soon as Data Lab is deployed by an infrastructure provisioning team and you have received the Data Lab URL, your username and password – open the Data Lab login page, fill in your credentials and hit Login.
 
-DLab Web Application authenticates users against:
+Data Lab Web Application authenticates users against:
 
 -   OpenLdap;
 -   Cloud Identity and Access Management service user validation;
@@ -80,7 +80,7 @@ DLab Web Application authenticates users against:
 
 \* Please refer to official documentation from Amazon to figure out how to manage Access Keys for your AWS Account: http://docs.aws.amazon.com/general/latest/gr/managing-aws-access-keys.html
 
-To stop working with DLab - click on Log Out link at the top right corner of DLab.
+To stop working with Data Lab, click on the Log Out link at the top right corner of Data Lab.
 
 After login, the user sees a warning in case the quota has been exceeded or is close to the limit.
 
@@ -95,7 +95,7 @@ After login user sees warning in case of exceeding quota or close to this limit.
 ----------------------------------
 # Create project <a name="setup_edge_node"></a>
 
-When you log into DLab Web interface, the first thing you need to do is to create a new project.
+When you log into Data Lab Web interface, the first thing you need to do is to create a new project.
 
 To do this, click on the “Upload” button on the “Projects” page, select your personal public key (or click on the "Generate" button), endpoint, group and hit the “Create” button. Do not forget to save your private key.
 
@@ -103,9 +103,9 @@ To do this click on “Upload” button on “Projects” page, select your pers
     <img src="doc/upload_or_generate_user_key.png" alt="Upload or generate user key" width="100%">
 </p>
 
-Please note, that you need to have a key pair combination (public and private key) to work with DLab. To figure out how to create public and private key, please click on “Where can I get public key?” on “Projects” page. DLab build-in wiki page guides Windows, MasOS and Linux on how to generate SSH key pairs quickly.
+Please note that you need to have a key pair combination (public and private key) to work with Data Lab. To figure out how to create a public and private key, please click on “Where can I get public key?” on the “Projects” page. The Data Lab built-in wiki page guides Windows, macOS and Linux users on how to generate SSH key pairs quickly.
 
-Creation of Project starts after hitting "Create" button. This process is a one-time operation for each Data Scientist and it might take up-to 10 minutes for DLab to setup initial infrastructure for you. During this process project is in status "Creating".
+Creation of Project starts after hitting the "Create" button. This process is a one-time operation for each Data Scientist and it might take up to 10 minutes for Data Lab to set up the initial infrastructure for you. During this process the project is in status "Creating".
 
 As soon as the Project is created, the Data Scientist can create a notebook server on the “List of Resources” page. The message “To start working, please create new environment” appears on the “List of Resources” page:
 
@@ -120,7 +120,7 @@ As soon as Project is created, Data Scientist can create  notebook server on “
 To create a new analytical environment from the “List of Resources” page, click on the "Create new" button.
 
 The "Create analytical tool" popup shows up. Data Scientist can choose the preferred project, endpoint and analytical tool. Adding new analytical toolset is supported by architecture, so you can expect new templates to show up in upcoming releases.
-Currently by means of DLab, Data Scientists can select between any of the following templates:
+Currently by means of Data Lab, Data Scientists can select between any of the following templates:
 
 -   Jupyter
 -   Apache Zeppelin
@@ -147,11 +147,11 @@ Instance shape dropdown, contains configurable list of shapes, which should be c
 
 These groups have T-Shirt based shapes (configurable) that can help the Data Scientist either save money\* by leveraging less powerful shapes (for working with relatively small datasets), or boost the performance of analytics by selecting a more powerful instance shape.
 
-\* Please refer to official documentation from Amazon that helps you to understand what [instance shapes](https://aws.amazon.com/ec2/instance-types/) are the most preferable in your particular DLAB setup. Also, you can use [AWS calculator](https://calculator.s3.amazonaws.com/index.html) to roughly estimate the cost of your environment.
+\* Please refer to official documentation from Amazon that helps you to understand what [instance shapes](https://aws.amazon.com/ec2/instance-types/) are the most preferable in your particular Data Lab setup. Also, you can use [AWS calculator](https://calculator.s3.amazonaws.com/index.html) to roughly estimate the cost of your environment.
 
-\* Please refer to official documentation from GCP that helps you to understand what [instance shapes](https://cloud.google.com/compute/docs/machine-types) are the most preferable in your particular DLAB setup. Also, you can use [GCP calculator](https://cloud.google.com/products/calculator) to roughly estimate the cost of your environment.
+\* Please refer to official documentation from GCP that helps you to understand what [instance shapes](https://cloud.google.com/compute/docs/machine-types) are the most preferable in your particular Data Lab setup. Also, you can use [GCP calculator](https://cloud.google.com/products/calculator) to roughly estimate the cost of your environment.
 
-\* Please refer to official documentation from Microsoft Azure that helps you to understand what [virtual machine shapes](https://azure.microsoft.com/en-us/pricing/details/virtual-machines/series/) are the most preferable in your particular DLAB setup. Also, you can use [Microsoft Azure calculator](https://azure.microsoft.com/en-us/pricing/calculator/?&ef_id=EAIaIQobChMItPmK5uj-6AIVj-iaCh0BFgVYEAAYASAAEgJ4KfD_BwE:G:s&OCID=AID2000606_SEM_UOMYUjFz&MarinID=UOMYUjFz_364338000380_microsoft%20 [...]
+\* Please refer to official documentation from Microsoft Azure that helps you to understand what [virtual machine shapes](https://azure.microsoft.com/en-us/pricing/details/virtual-machines/series/) are the most preferable in your particular Data Lab setup. Also, you can use [Microsoft Azure calculator](https://azure.microsoft.com/en-us/pricing/calculator/?&ef_id=EAIaIQobChMItPmK5uj-6AIVj-iaCh0BFgVYEAAYASAAEgJ4KfD_BwE:G:s&OCID=AID2000606_SEM_UOMYUjFz&MarinID=UOMYUjFz_364338000380_microsof [...]
 
 You can override the default configuration of local Spark. The configuration object is referenced as a JSON file. To tune the Spark configuration, check off the "Spark configurations" check box and insert JSON in the text box, as in the sketch below.
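
A minimal illustrative example of such a configuration object (the property names are standard Spark settings; the exact schema expected by the text box may differ between Data Lab versions):

```python
# Sketch: a Spark override of the kind the "Spark configurations"
# text box expects; the layout mirrors EMR-style classifications and
# the property values are placeholders.
import json

spark_config = [{
    'Classification': 'spark-defaults',
    'Properties': {
        'spark.executor.memory': '4g',
        'spark.executor.cores': '2'
    }
}]
print(json.dumps(spark_config, indent=2))
```
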
 
 After choosing the resource and group, you need to wait a while until the list of all available libraries is received.
 
 ![Libraries list loading](doc/notebook_list_libs.png)
 
-**Note:** Apt or Yum packages depend on your DLab OS family.
+**Note:** Apt or Yum packages depend on your Data Lab OS family.
 
 **Note:** In the Others group you can find other Python (2/3) packages which don't have version classifiers.
 
@@ -330,13 +330,13 @@ This picture shows menu for creating Standalone Apache Spark cluster for Azure,
 
 On top of that, you can override the default Spark configuration for a Standalone Apache Spark cluster by supplying a configuration object for applications when you create a cluster or after it has been created. The configuration object is referenced as a JSON file. To tune the Spark configuration, check off the "Cluster configurations" check box and insert JSON in the text box.
 
-If you click on "Create" button Computational resource creation kicks off. You see corresponding record on DLab Web UI in status "Creating":
+If you click on the "Create" button, Computational resource creation kicks off. You see the corresponding record on the Data Lab Web UI in status "Creating":
 
 ![Creating Computational resource](doc/emr_creating.png)
 
 Once Computational resources are provisioned, their status changes to "Running".
 
-After clicking on Computational resource name in DLab dashboard you see Computational resource details popup:
+After clicking on the Computational resource name in the Data Lab dashboard, you see the Computational resource details popup:
 
 <p align="center"> 
     <img src="doc/emr_info.png" alt="Computational resource info" width="480">
@@ -455,7 +455,7 @@ After login user is notified  that corresponding resources are about to be stopp
 
 ### Manage Git credentials <a name="git_creds"></a>
 
-To work with Git (pull, push) via UI tool (ungit) you could add multiple credentials in DLab UI, which are set on all running instances with analytical tools.
+To work with Git (pull, push) via the UI tool (ungit), you can add multiple credentials in the Data Lab UI, which are set on all running instances with analytical tools.
 
 When you click on the "Git credentials" button, the following popup shows up:
 
@@ -488,7 +488,7 @@ On every analytical tool instance you can see Git UI tool (ungit):
 
 Before you start working with Git repositories, you need to change the working directory at the top of the window to:
 
-**/home/dlab-user/** and press Enter.
+**/home/datalab-user/** and press Enter.
 
 After changing the working directory you can create a repository or, better, clone an existing one:
 
@@ -518,7 +518,7 @@ Also clicking on "Circle" button you can uncommit or revert changes.
 
 ### Bucket browser <a name="bucket"></a>
 
-You are able to access to cloud buckets via DLab Web UI.
+You are able to access cloud buckets via the Data Lab Web UI.
 There are two ways to open bucket browser:
 - clicking on Notebook name on the "List of resources" page, where there is an "Open bucket browser" link;
 - clicking on "Bucket browser" bucket on the "List of resources" page.
@@ -553,7 +553,7 @@ To do it click on "Add group" button. "Add group" popup shows up:
 </p>
 
 Roles consist of:
-- Administration - allow to execute administrative operation for the whole DLab or administrative operation only per project;
+- Administration - allows executing administrative operations for the whole Data Lab or administrative operations only per project;
 - Billing - allows viewing billing for only one's own resources or for all users;
 - Bucket browser actions - allows setting permissions for cloud buckets if the user only has access via the bucket browser;
 - Compute - list of Compute types which are available for creation;
@@ -600,7 +600,7 @@ To terminate Edge node hit "Terminate edge node". After that confirm "OK" in con
 
 ## Environment management <a name="environment_management"></a>
 
-DLab Environment Management page is an administration page allowing adminstrator to see the list of all users environments and to stop/terminate all of them.
+Data Lab Environment Management page is an administration page allowing the administrator to see the list of all users' environments and to stop/terminate them.
 
 To access the Environment management page, either navigate to it via the main menu:
 
@@ -645,16 +645,16 @@ Once all fields are filled in and you click on "Connect" button, you are able to
 
 The administrator can deactivate the whole analytical environment via the bin icon <img src="doc/bin_icon.png" alt="bin" width="15">. All related instances change their statuses to "Terminating" and soon become "Terminated".
 
-### Manage DLab quotas <a name="manage_dlab_quotas"></a>
+### Manage Data Lab quotas <a name="manage_dlab_quotas"></a>
 
-Administrator can set quotas per project (monthly or total period) and for the whole DLab. To do it click on "Manage DLab quotas" button. "Manage DLab quotas" popup shows up. Administrator can see all active project:
+Administrator can set quotas per project (monthly or total period) and for the whole Data Lab. To do it, click on the "Manage Data Lab quotas" button. The "Manage Data Lab quotas" popup shows up. The administrator can see all active projects:
 
 <p align="center"> 
     <img src="doc/manage_environment.png" alt="Manage environment" width="520">
 </p>
 
-After filling fields and clicking on "Apply" button, new quotas are used for project and DLab.
-If project and DLab quotas are exceeded the warning shows up during login.
+After filling in the fields and clicking on the "Apply" button, the new quotas are applied to the project and Data Lab.
+If project and Data Lab quotas are exceeded, a warning shows up during login.
 
 <p align="center" class="facebox-popup"> 
     <img src="doc/project_quota.png" alt="Exceeded project quota" width="400">
@@ -664,7 +664,7 @@ In such case user cannot create new instance and already "Running" instance chan
 
 --------------------------------
 
-# DLab Billing report <a name="billing_page"></a>
+# Data Lab Billing report <a name="billing_page"></a>
 
 On this page you can see all billing information, including all costs associated with the service base name of the SSN.
 
@@ -690,7 +690,7 @@ In the footer of billing report, you can see "Total" cost for all environments.
 
 --------------------------------
 
-# DLab Audit report <a name="audit_page"></a>
+# Data Lab Audit report <a name="audit_page"></a>
 
 On this page you can see the history of changes made by any user.
 
diff --git a/doc/dlab_aws.png b/doc/datalab_aws.png
similarity index 100%
rename from doc/dlab_aws.png
rename to doc/datalab_aws.png
diff --git a/doc/dlab_azure.png b/doc/datalab_azure.png
similarity index 100%
rename from doc/dlab_azure.png
rename to doc/datalab_azure.png
diff --git a/doc/dlab_gcp.png b/doc/datalab_gcp.png
similarity index 100%
rename from doc/dlab_gcp.png
rename to doc/datalab_gcp.png
diff --git a/infrastructure-provisioning/scripts/POST_DEPLOYMENT.md b/infrastructure-provisioning/scripts/POST_DEPLOYMENT.md
index aee28b7..db90b70 100644
--- a/infrastructure-provisioning/scripts/POST_DEPLOYMENT.md
+++ b/infrastructure-provisioning/scripts/POST_DEPLOYMENT.md
@@ -1,4 +1,4 @@
-### Prerequisites for DLab post-deployment
+### Prerequisites for Data Lab post-deployment
 
 - Service account with the following roles:
 ```
@@ -14,7 +14,7 @@ BigQuery Data Viewer
 BigQuery Job User
 ```
 - Google Cloud Storage JSON API should be enabled
-- Keycloak server with specific client for Dlab UI (could be dpeloyed with Kecylaok deployment script)
+- Keycloak server with specific client for Data Lab UI (could be deployed with the Keycloak deployment script)
 
 Service account should be created manually and attached to the instance with the post-deployment script.
 
@@ -24,7 +24,7 @@ To configure SSN node, following steps should be executed:
 
 - Connect to the instance via SSH and run the following commands:
 ```
-/usr/bin/python /opt/dlab/sources/infrastructure-provisioning/scripts/post-deployment_configuration.py
+/usr/bin/python /opt/datalab/sources/infrastructure-provisioning/scripts/post-deployment_configuration.py
     --keycloak_realm_name <value>
     --keycloak_auth_server_url <value>
     --keycloak_client_name <value>
@@ -38,7 +38,7 @@ List of parameters for SSN node post-deployment script:
 |-------------------------------|-------------------------------------------------------------------------------------|
 | keycloak\_realm\_name         | Keycloak realm name                                                                 |
 | keycloak\_auth\_server\_url   | Url of Keycloak auth server                                                         |
-| keycloak\_client\_name        | Name of client for Dlab UI                                                          |
-| keycloak\_client\_secret      | Secret of client for Dlab UI                                                        |
+| keycloak\_client\_name        | Name of client for Data Lab UI                                                          |
+| keycloak\_client\_secret      | Secret of client for Data Lab UI                                                        |
 | keycloak\_user                | Keycloak user with administrator permissions                                        |
 | keycloak\_admin\_password     | Password for Keycloak user with administrator permissions                           |
\ No newline at end of file
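
Putting the table together, a minimal sketch of a complete invocation with placeholder values (the flag names are those listed above):

```python
# Sketch: run the post-deployment script with the parameters from the
# table above; every value below is a placeholder.
import subprocess

subprocess.check_call([
    '/usr/bin/python',
    '/opt/datalab/sources/infrastructure-provisioning/scripts/post-deployment_configuration.py',
    '--keycloak_realm_name', 'datalab',
    '--keycloak_auth_server_url', 'https://keycloak.example.com/auth',
    '--keycloak_client_name', 'datalab-ui',
    '--keycloak_client_secret', 'CLIENT_SECRET',
    '--keycloak_user', 'admin',
    '--keycloak_admin_password', 'ADMIN_PASSWORD',
])
```
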
diff --git a/infrastructure-provisioning/scripts/deploy_dlab.py b/infrastructure-provisioning/scripts/deploy_datalab.py
similarity index 91%
rename from infrastructure-provisioning/scripts/deploy_dlab.py
rename to infrastructure-provisioning/scripts/deploy_datalab.py
index df9fb06..56e70a8 100644
--- a/infrastructure-provisioning/scripts/deploy_dlab.py
+++ b/infrastructure-provisioning/scripts/deploy_datalab.py
@@ -26,8 +26,8 @@ import argparse
 import os
 
 parser = argparse.ArgumentParser()
-parser.add_argument('--conf_service_base_name', type=str, help='unique name for DLab environment')
-parser.add_argument('--conf_network_type', type=str, default='', help='Define in which network DLab will be deployed. '
+parser.add_argument('--conf_service_base_name', type=str, help='unique name for Data Lab environment')
+parser.add_argument('--conf_network_type', type=str, default='', help='Define in which network Data Lab will be deployed. '
                                                                       'Possible options: public|private')
 parser.add_argument('--conf_vpc_cidr', type=str, default='', help='CIDR of VPC')
 parser.add_argument('--conf_vpc2_cidr', type=str, default='', help='CIDR of secondary VPC')
@@ -52,7 +52,7 @@ parser.add_argument('--gcp_zone', type=str, default='', help='GCP zone')
 parser.add_argument('--conf_os_family', type=str, default='',
                     help='Operating system type. Available options: debian, redhat')
 parser.add_argument('--conf_cloud_provider', type=str, default='',
-                    help='Where DLab should be deployed. Available options: aws, azure, gcp')
+                    help='Where Data Lab should be deployed. Available options: aws, azure, gcp')
 parser.add_argument('--ssn_hosted_zone_name', type=str, default='', help='Name of hosted zone')
 parser.add_argument('--ssn_hosted_zone_id', type=str, default='', help='ID of hosted zone')
 parser.add_argument('--ssn_subdomain', type=str, default='', help='Subdomain name')
@@ -78,8 +78,8 @@ parser.add_argument('--gcp_firewall_name', type=str, default='', help='One of mo
 parser.add_argument('--key_path', type=str, default='', help='Path to admin key (WITHOUT KEY NAME)')
 parser.add_argument('--conf_key_name', type=str, default='', help='Admin key name (WITHOUT ".pem")')
 parser.add_argument('--workspace_path', type=str, default='', help='Path to the workspace directory')
-parser.add_argument('--conf_tag_resource_id', type=str, default='dlab', help='The name of user tag')
-parser.add_argument('--conf_billing_tag', type=str, default='dlab', help='Billing tag')
+parser.add_argument('--conf_tag_resource_id', type=str, default='datalab', help='The name of user tag')
+parser.add_argument('--conf_billing_tag', type=str, default='datalab', help='Billing tag')
 parser.add_argument('--aws_ssn_instance_size', type=str, default='t2.large', help='The SSN instance shape')
 parser.add_argument('--azure_ssn_instance_size', type=str, default='Standard_DS2_v2', help='The SSN instance shape')
 parser.add_argument('--gcp_ssn_instance_size', type=str, default='n1-standard-2', help='The SSN instance shape')
@@ -100,14 +100,14 @@ parser.add_argument('--azure_locale', type=str, default='', help='Azure locale')
 parser.add_argument('--azure_application_id', type=str, default='', help='Azure login application ID')
 parser.add_argument('--azure_validate_permission_scope', type=str, default='true', help='Azure permission scope '
                                                                                         'validation(true|false).')
-parser.add_argument('--azure_oauth2_enabled', type=str, default='false', help='Using OAuth2 for logging in DLab')
+parser.add_argument('--azure_oauth2_enabled', type=str, default='false', help='Using OAuth2 for logging in Data Lab')
 parser.add_argument('--azure_region_info', type=str, default='', help='Azure region info')
 parser.add_argument('--azure_source_vpc_name', type=str, default='', help='Azure VPC source Name')
 parser.add_argument('--azure_source_resource_group_name', type=str, default='', help='Azure source resource group')
 parser.add_argument('--gcp_project_id', type=str, default='', help='The project ID in Google Cloud Platform')
 parser.add_argument('--gcp_service_account_path', type=str, default='', help='Path to the service account JSON file in Google Cloud Platform')
-parser.add_argument('--dlab_id', type=str, default="'resource_tags_user_user_tag'", help='Column name in report file that contains '
-                                                                           'dlab id tag')
+parser.add_argument('--datalab_id', type=str, default="'resource_tags_user_user_tag'", help='Column name in report file that contains '
+                                                                           'datalab id tag')
 parser.add_argument('--usage_date', type=str, default='line_item_usage_start_date', help='Column name in report file that contains '
                                                                              'usage date tag')
 parser.add_argument('--product', type=str, default='product_product_name', help='Column name in report file that contains '
@@ -118,7 +118,7 @@ parser.add_argument('--usage', type=str, default='line_item_usage_amount', help=
                                                                        'usage tag')
 parser.add_argument('--cost', type=str, default='line_item_blended_cost', help='Column name in report file that contains cost tag')
 parser.add_argument('--resource_id', type=str, default='line_item_resource_id', help='Column name in report file that contains '
-                                                                          'dlab resource id tag')
+                                                                          'datalab resource id tag')
 parser.add_argument('--ldap_hostname', type=str, default='localhost', help='Ldap instance hostname')
 parser.add_argument('--ldap_dn', type=str, default='dc=example,dc=com',
                     help='Ldap distinguished name')
@@ -126,11 +126,11 @@ parser.add_argument('--ldap_ou', type=str, default='ou=People', help='Ldap organ
 parser.add_argument('--ldap_service_username', type=str, default='cn=service-user', help='Ldap service user name')
 parser.add_argument('--ldap_service_password', type=str, default='service-user-password',
                     help='Ldap password for admin user')
-parser.add_argument('--keycloak_realm_name', type=str, default='dlab', help='Keycloak Realm name')
-parser.add_argument('--keycloak_auth_server_url', type=str, default='dlab', help='Keycloak auth server URL')
-parser.add_argument('--keycloak_client_name', type=str, default='dlab', help='Keycloak client name')
-parser.add_argument('--keycloak_client_secret', type=str, default='dlab', help='Keycloak client secret')
-parser.add_argument('--keycloak_user', type=str, default='dlab', help='Keycloak user')
+parser.add_argument('--keycloak_realm_name', type=str, default='datalab', help='Keycloak Realm name')
+parser.add_argument('--keycloak_auth_server_url', type=str, default='datalab', help='Keycloak auth server URL')
+parser.add_argument('--keycloak_client_name', type=str, default='datalab', help='Keycloak client name')
+parser.add_argument('--keycloak_client_secret', type=str, default='datalab', help='Keycloak client secret')
+parser.add_argument('--keycloak_user', type=str, default='datalab', help='Keycloak user')
 parser.add_argument('--keycloak_user_password', type=str, default='keycloak-user-password', help='Keycloak user password')
 parser.add_argument('--tags', type=str, default='line_item_operation,line_item_line_item_description', help='Column name in report file that '
                                                                                   'contains tags')
@@ -168,7 +168,7 @@ def generate_docker_command():
     elif args.conf_cloud_provider == 'gcp':
         command.append('-v {}:/root/service_account.json '.format(args.gcp_service_account_path))
     if args.ssl_cert_path != '' and args.ssl_key_path != '':
-        command.append('-v {}:/root/certs/dlab.crt -v {}:/root/certs/dlab.key '.format(args.ssl_cert_path,
+        command.append('-v {}:/root/certs/datalab.crt -v {}:/root/certs/datalab.key '.format(args.ssl_cert_path,
                                                                                        args.ssl_key_path))
     attrs = vars(args)
     skipped_parameters = ['action', 'key_path', 'workspace_path', 'gcp_service_account_path', 'ssl_cert_path',
@@ -177,7 +177,7 @@ def generate_docker_command():
         if attrs[i] and i not in skipped_parameters:
             command.append("-e '{}={}' ".format(i, attrs[i]))
     command.append('-e "conf_resource=ssn" ')
-    command.append('docker.dlab-ssn ')
+    command.append('docker.datalab-ssn ')
     command.append('--action {} '.format(args.action))
     return docker_command.join(command)
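
For reference, a minimal sketch of invoking the renamed script with the renamed flags; every value below is a placeholder:

```python
# Sketch: call deploy_datalab.py with the renamed arguments; all
# values are placeholders.
import subprocess

subprocess.check_call([
    'python', 'infrastructure-provisioning/scripts/deploy_datalab.py',
    '--conf_service_base_name', 'datalab-test',
    '--conf_cloud_provider', 'gcp',
    '--conf_os_family', 'debian',
    '--key_path', '/home/user/keys/',
    '--conf_key_name', 'KEYNAME',
    '--action', 'create',
])
```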
 
@@ -187,19 +187,19 @@ def build_docker_images(args):
     with lcd(args.workspace_path):
         local('sudo docker build --build-arg OS={0} --build-arg SRC_PATH="infrastructure-provisioning/src/" --file '
               'infrastructure-provisioning/src/general/files/{1}/'
-              'base_Dockerfile -t docker.dlab-base .'.format(args.conf_os_family, args.conf_cloud_provider))
+              'base_Dockerfile -t docker.datalab-base .'.format(args.conf_os_family, args.conf_cloud_provider))
         local('sudo docker build --build-arg OS={0} --file infrastructure-provisioning/src/general/files/{1}/'
-              'ssn_Dockerfile -t docker.dlab-ssn .'.format(args.conf_os_family, args.conf_cloud_provider))
+              'ssn_Dockerfile -t docker.datalab-ssn .'.format(args.conf_os_family, args.conf_cloud_provider))
 
 
-def deploy_dlab(args):
+def deploy_datalab(args):
     # Creating SSN node
     docker_command = generate_docker_command()
     local(docker_command)
 
 
-def terminate_dlab(args):
-    # Dropping Dlab environment with selected infrastructure tag
+def terminate_datalab(args):
+    # Dropping Data Lab environment with selected infrastructure tag
     docker_command = generate_docker_command()
     local(docker_command)
 
@@ -211,10 +211,10 @@ if __name__ == "__main__":
     if args.action == 'build':
         build_docker_images(args)
     elif args.action == 'deploy':
-        deploy_dlab(args)
+        deploy_datalab(args)
     elif args.action == 'create':
         build_docker_images(args)
-        deploy_dlab(args)
+        deploy_datalab(args)
     elif args.action == 'terminate':
         build_docker_images(args)
-        terminate_dlab(args)
+        terminate_datalab(args)
diff --git a/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py b/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py
index 0c761a5..9c70f7d 100644
--- a/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py
+++ b/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py
@@ -42,9 +42,9 @@ parser.add_argument('--vpc_cidr', type=str, default='172.31.0.0/16', help='Cidr
 parser.add_argument('--subnet_id', type=str, default='', help='AWS Subnet ID')
 parser.add_argument('--subnet_cidr', type=str, default='172.31.0.0/24', help='Cidr of subnet')
 parser.add_argument('--sg_id', type=str, default='', help='AWS VPC ID')
-parser.add_argument('--billing_tag', type=str, default='product:dlab', help='Tag in format: "Key1:Value1"')
+parser.add_argument('--billing_tag', type=str, default='product:datalab', help='Tag in format: "Key1:Value1"')
 parser.add_argument('--additional_tags', type=str, default='', help='Tags in format: "Key1:Value1;Key2:Value2"')
-parser.add_argument('--tag_resource_id', type=str, default='dlab', help='The name of user tag')
+parser.add_argument('--tag_resource_id', type=str, default='datalab', help='The name of user tag')
 parser.add_argument('--allowed_ip_cidr', type=str, default='', help='Comma-separated CIDR of IPs which will have '
                                                                     'access to the instance')
 parser.add_argument('--key_name', type=str, default='', help='Key name (WITHOUT ".pem")')
@@ -60,7 +60,7 @@ parser.add_argument('--efs_enabled', type=str, default='False', help="True - use
 parser.add_argument('--efs_id', type=str, default='', help="ID of AWS EFS")
 parser.add_argument('--primary_disk_size', type=str, default='30', help="Disk size of primary volume")
 parser.add_argument('--additional_disk_size', type=str, default='50', help="Disk size of additional volume")
-parser.add_argument('--dlab_conf_file_path', type=str, default='', help="Full path to DLab conf file")
+parser.add_argument('--datalab_conf_file_path', type=str, default='', help="Full path to Data Lab conf file")
 parser.add_argument('--nexus_admin_password', type=str, default='', help="Password for Nexus admin user")
 parser.add_argument('--nexus_service_user_name', type=str, default='dlab-nexus', help="Nexus service user name")
 parser.add_argument('--nexus_service_user_password', type=str, default='', help="Nexus service user password")
@@ -794,7 +794,7 @@ def ensure_ssh_user(initial_user):
             sudo('touch /home/{}/.ssh_user_ensured'.format(initial_user))
     except Exception as err:
         traceback.print_exc(file=sys.stdout)
-        print('Error with creating dlab-user: {}'.format(str(err)))
+        print('Error with creating datalab-user: {}'.format(str(err)))
         raise Exception
 
 
@@ -1116,7 +1116,7 @@ def configure_ssl():
             sudo('echo "[ subject_alt_name ]" >> /tmp/openssl.cnf')
             sudo('echo "{}" >> /tmp/openssl.cnf'.format(subject_alt_name))
             sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/repository.key '
-                 '-out /etc/ssl/certs/repository.crt -subj "/C=US/ST=US/L=US/O=dlab/CN={}" -config '
+                 '-out /etc/ssl/certs/repository.crt -subj "/C=US/ST=US/L=US/O=datalab/CN={}" -config '
                  '/tmp/openssl.cnf -extensions subject_alt_name'.format(hostname))
             sudo('openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048')
             sudo('touch /home/{}/.ensure_dir/ssl_ensured'.format(configuration['conf_os_user']))
@@ -1276,8 +1276,8 @@ def prepare_images():
                 sudo('docker build --file Dockerfile -t pre-base .')
             sudo('docker login -u {0} -p {1} localhost:8083'.format(args.nexus_service_user_name,
                                                                     args.nexus_service_user_password))
-            sudo('docker tag pre-base localhost:8083/dlab-pre-base')
-            sudo('docker push localhost:8083/dlab-pre-base')
+            sudo('docker tag pre-base localhost:8083/datalab-pre-base')
+            sudo('docker push localhost:8083/datalab-pre-base')
             sudo('touch /home/{}/.ensure_dir/images_prepared'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc()
@@ -1327,12 +1327,12 @@ if __name__ == "__main__":
     pre_defined_subnet = True
     pre_defined_sg = True
     pre_defined_efs = True
-    if args.action != 'terminate' and args.dlab_conf_file_path == '':
-        print('Please provide argument --dlab_conf_file_path ! Aborting... ')
+    if args.action != 'terminate' and args.datalab_conf_file_path == '':
+        print('Please provide argument --datalab_conf_file_path ! Aborting... ')
         sys.exit(1)
     configuration = dict()
     config = SafeConfigParser()
-    config.read(args.dlab_conf_file_path)
+    config.read(args.datalab_conf_file_path)
     for section in config.sections():
         for option in config.options(section):
             varname = "{0}_{1}".format(section, option)
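
As the loop above shows, every section/option pair of the conf file is flattened into a `section_option` key; a minimal self-contained sketch of the same flattening (Python 2, matching the script; the [conf] section content is a placeholder):

```python
# Sketch: flatten an ini fragment the same way the loop above does;
# the section and values are placeholders.
import io
from ConfigParser import SafeConfigParser  # Python 2, as in the script

sample = u"[conf]\nos_user = datalab-user\nkey_name = KEYNAME\n"
config = SafeConfigParser()
config.readfp(io.StringIO(sample))
configuration = {}
for section in config.sections():
    for option in config.options(section):
        varname = "{0}_{1}".format(section, option)
        configuration[varname] = config.get(section, option)
print(configuration)  # {'conf_os_user': 'datalab-user', 'conf_key_name': 'KEYNAME'}
```
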
@@ -1695,7 +1695,7 @@ if __name__ == "__main__":
         env.host_string = 'ubuntu@' + ec2_ip_address
         print("CONFIGURE LOCAL REPOSITORY")
         try:
-            print('CREATING DLAB-USER')
+            print('CREATING DATA LAB USER')
             ensure_ssh_user('ubuntu')
             env.host_string = configuration['conf_os_user'] + '@' + ec2_ip_address
 
@@ -1723,7 +1723,7 @@ if __name__ == "__main__":
             print('INSTALLING DOCKER')
             install_docker()
 
-            print('PREPARING DLAB DOCKER IMAGES')
+            print('PREPARING DATA LAB DOCKER IMAGES')
             prepare_images()
 
             print('INSTALLING SQUID')
diff --git a/infrastructure-provisioning/scripts/post-deployment_configuration.py b/infrastructure-provisioning/scripts/post-deployment_configuration.py
index 051258f..6839112 100644
--- a/infrastructure-provisioning/scripts/post-deployment_configuration.py
+++ b/infrastructure-provisioning/scripts/post-deployment_configuration.py
@@ -44,29 +44,29 @@ if __name__ == "__main__":
 
     print("Getting cloud and instance parameters")
     server_external_ip = requests.get('http://metadata/computeMetadata/v1/instance/network-interfaces/0/access-configs/0/external-ip', headers=headers).text
-    dlab_sbn = requests.get('http://metadata/computeMetadata/v1/instance/name', headers=headers).text
-    dlab_ssn_static_ip_name = dlab_sbn + '-ip'
-    dlab_zone = requests.get('http://metadata/computeMetadata/v1/instance/zone', headers=headers).text.split('/')[-1]
-    dlab_region = '-'.join(dlab_zone.split('-', 2)[:2])
-    deployment_vpcId = local("sudo gcloud compute instances describe {0} --zone {1} --format 'value(networkInterfaces.network)' | sed 's|.*/||'".format(dlab_sbn, dlab_zone), capture=True)
-    deployment_subnetId = local("sudo gcloud compute instances describe {0} --zone {1} --format 'value(networkInterfaces.subnetwork)' | sed 's|.*/||'".format(dlab_sbn, dlab_zone), capture=True)
+    datalab_sbn = requests.get('http://metadata/computeMetadata/v1/instance/name', headers=headers).text
+    datalab_ssn_static_ip_name = datalab_sbn + '-ip'
+    datalab_zone = requests.get('http://metadata/computeMetadata/v1/instance/zone', headers=headers).text.split('/')[-1]
+    datalab_region = '-'.join(datalab_zone.split('-', 2)[:2])
+    deployment_vpcId = local("sudo gcloud compute instances describe {0} --zone {1} --format 'value(networkInterfaces.network)' | sed 's|.*/||'".format(datalab_sbn, datalab_zone), capture=True)
+    deployment_subnetId = local("sudo gcloud compute instances describe {0} --zone {1} --format 'value(networkInterfaces.subnetwork)' | sed 's|.*/||'".format(datalab_sbn, datalab_zone), capture=True)
     gcp_projectId = requests.get('http://metadata/computeMetadata/v1/project/project-id', headers=headers).text
     keycloak_redirectUri = 'http://{}'.format(server_external_ip)
 
-    print("Generationg SSH keyfile for dlab-user")
+    print("Generationg SSH keyfile for datalab-user")
     key = RSA.generate(2048)
-    local("sudo sh -c 'echo \"{}\" > /home/dlab-user/keys/KEY-FILE.pem'".format(key.exportKey('PEM')))
-    local("sudo chmod 600 /home/dlab-user/keys/KEY-FILE.pem")
+    local("sudo sh -c 'echo \"{}\" > /home/datalab-user/keys/KEY-FILE.pem'".format(key.exportKey('PEM')))
+    local("sudo chmod 600 /home/datalab-user/keys/KEY-FILE.pem")
     pubkey = key.publickey()
-    local("sudo sh -c 'echo \"{}\" > /home/dlab-user/.ssh/authorized_keys'".format(pubkey.exportKey('OpenSSH')))
+    local("sudo sh -c 'echo \"{}\" > /home/datalab-user/.ssh/authorized_keys'".format(pubkey.exportKey('OpenSSH')))
 
     print("Generationg MongoDB password")
     mongo_pwd = uuid.uuid4().hex
     try:
-        local("sudo echo -e 'db.changeUserPassword(\"admin\", \"{}\")' | mongo dlabdb --port 27017 -u admin -p MONGO_PASSWORD".format(mongo_pwd))
-        local('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/dlab/conf/billing.yml'.format(mongo_pwd))
+        local("sudo echo -e 'db.changeUserPassword(\"admin\", \"{}\")' | mongo datalabdb --port 27017 -u admin -p MONGO_PASSWORD".format(mongo_pwd))
+        local('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/datalab/conf/billing.yml'.format(mongo_pwd))
 
-        local('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/dlab/conf/ssn.yml'.format(mongo_pwd))
+        local('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/datalab/conf/ssn.yml'.format(mongo_pwd))
     except:
         print('Mongo password was already changed')
 
@@ -76,63 +76,63 @@ if __name__ == "__main__":
     if static_address_exist:
         print('Address is already static')
     else:
-        local("sudo gcloud compute addresses create {0} --addresses {1} --region {2}".format(dlab_ssn_static_ip_name,
+        local("sudo gcloud compute addresses create {0} --addresses {1} --region {2}".format(datalab_ssn_static_ip_name,
                                                                                              server_external_ip,
-                                                                                             dlab_region), capture=True)
+                                                                                             datalab_region), capture=True)
 
     print("Overwriting SSN parameters")
 
     if deployment_subnetId == 'default':
-        local('sudo sed -i "s|# user_subnets_range|user_subnets_range|g" /opt/dlab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini')
+        local('sudo sed -i "s|# user_subnets_range|user_subnets_range|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini')
 
-    local('sudo sed -i "s|DLAB_SBN|{}|g" /opt/dlab/conf/self-service.yml'.format(dlab_sbn))
-    local('sudo sed -i "s|KEYCLOAK_REDIRECTURI|{}|g" /opt/dlab/conf/self-service.yml'.format(keycloak_redirectUri))
-    local('sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/dlab/conf/self-service.yml'.format(args.keycloak_realm_name))
-    local('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/dlab/conf/self-service.yml'.format(
+    local('sudo sed -i "s|DATA_LAB_SBN|{}|g" /opt/datalab/conf/self-service.yml'.format(datalab_sbn))
+    local('sudo sed -i "s|KEYCLOAK_REDIRECTURI|{}|g" /opt/datalab/conf/self-service.yml'.format(keycloak_redirectUri))
+    local('sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/conf/self-service.yml'.format(args.keycloak_realm_name))
+    local('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/datalab/conf/self-service.yml'.format(
         args.keycloak_auth_server_url))
-    local('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/dlab/conf/self-service.yml'.format(args.keycloak_client_name))
-    local('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/dlab/conf/self-service.yml'.format(
+    local('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/datalab/conf/self-service.yml'.format(args.keycloak_client_name))
+    local('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/datalab/conf/self-service.yml'.format(
         args.keycloak_client_secret))
 
-    local('sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/dlab/conf/provisioning.yml'.format(args.keycloak_realm_name))
-    local('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/dlab/conf/provisioning.yml'.format(
+    local('sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/conf/provisioning.yml'.format(args.keycloak_realm_name))
+    local('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/datalab/conf/provisioning.yml'.format(
         args.keycloak_auth_server_url))
-    local('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/dlab/conf/provisioning.yml'.format(args.keycloak_client_name))
-    local('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/dlab/conf/provisioning.yml'.format(
+    local('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/datalab/conf/provisioning.yml'.format(args.keycloak_client_name))
+    local('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/datalab/conf/provisioning.yml'.format(
         args.keycloak_client_secret))
-    local('sudo sed -i "s|DLAB_SBN|{}|g" /opt/dlab/conf/provisioning.yml'.format(dlab_sbn))
-    local('sudo sed -i "s|SUBNET_ID|{}|g" /opt/dlab/conf/provisioning.yml'.format(deployment_subnetId))
-    local('sudo sed -i "s|DLAB_REGION|{}|g" /opt/dlab/conf/provisioning.yml'.format(dlab_region))
-    local('sudo sed -i "s|DLAB_ZONE|{}|g" /opt/dlab/conf/provisioning.yml'.format(dlab_zone))
-    local('sudo sed -i "s|SSN_VPC_ID|{}|g" /opt/dlab/conf/provisioning.yml'.format(deployment_vpcId))
-    local('sudo sed -i "s|GCP_PROJECT_ID|{}|g" /opt/dlab/conf/provisioning.yml'.format(gcp_projectId))
-    local('sudo sed -i "s|KEYCLOAK_USER|{}|g" /opt/dlab/conf/provisioning.yml'.format(args.keycloak_user))
-    local('sudo sed -i "s|KEYCLOAK_ADMIN_PASSWORD|{}|g" /opt/dlab/conf/provisioning.yml'.format(
+    local('sudo sed -i "s|DATA_LAB_SBN|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_sbn))
+    local('sudo sed -i "s|SUBNET_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(deployment_subnetId))
+    local('sudo sed -i "s|DATA_LAB_REGION|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_region))
+    local('sudo sed -i "s|DATA_LAB_ZONE|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_zone))
+    local('sudo sed -i "s|SSN_VPC_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(deployment_vpcId))
+    local('sudo sed -i "s|GCP_PROJECT_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(gcp_projectId))
+    local('sudo sed -i "s|KEYCLOAK_USER|{}|g" /opt/datalab/conf/provisioning.yml'.format(args.keycloak_user))
+    local('sudo sed -i "s|KEYCLOAK_ADMIN_PASSWORD|{}|g" /opt/datalab/conf/provisioning.yml'.format(
         args.keycloak_admin_password))
 
-    local('sudo sed -i "s|DLAB_SBN|{}|g" /opt/dlab/conf/billing.yml'.format(dlab_sbn))
+    local('sudo sed -i "s|DATA_LAB_SBN|{}|g" /opt/datalab/conf/billing.yml'.format(datalab_sbn))
 
-    local('sudo sed -i "s|DLAB_SBN|{}|g" /opt/dlab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(dlab_sbn))
-    local('sudo sed -i "s|GCP_PROJECT_ID|{}|g" /opt/dlab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(gcp_projectId))
-    local('sudo sed -i "s|DLAB_REGION|{}|g" /opt/dlab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(dlab_region))
-    local('sudo sed -i "s|DLAB_ZONE|{}|g" /opt/dlab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(dlab_zone))
-    local('sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/dlab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_realm_name))
-    local('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/dlab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_auth_server_url))
-    local('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/dlab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_client_name))
-    local('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/dlab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_client_secret))
-    local('sudo sed -i "s|KEYCLOAK_USER|{}|g" /opt/dlab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_user))
-    local('sudo sed -i "s|KEYCLOAK_ADMIN_PASSWORD|{}|g" /opt/dlab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_admin_password))
+    local('sudo sed -i "s|DATA_LAB_SBN|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(datalab_sbn))
+    local('sudo sed -i "s|GCP_PROJECT_ID|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(gcp_projectId))
+    local('sudo sed -i "s|DATA_LAB_REGION|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(datalab_region))
+    local('sudo sed -i "s|DATA_LAB_ZONE|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(datalab_zone))
+    local('sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_realm_name))
+    local('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_auth_server_url))
+    local('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_client_name))
+    local('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_client_secret))
+    local('sudo sed -i "s|KEYCLOAK_USER|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_user))
+    local('sudo sed -i "s|KEYCLOAK_ADMIN_PASSWORD|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.keycloak_admin_password))
 
     print('SSL certificate generating')
     keystore_passwd = uuid.uuid4().hex
-    local('sudo rm /home/dlab-user/keys/ssn*')
-    local('sudo rm /etc/ssl/certs/dlab*')
+    local('sudo rm /home/datalab-user/keys/ssn*')
+    local('sudo rm /etc/ssl/certs/datalab*')
     local('sudo keytool -delete -noprompt -trustcacerts -alias ssn -storepass changeit -keystore /usr/lib/jvm/java-8-openjdk-amd64/jre/lib/security/cacerts')
-    local('sudo openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/dlab.key -out /etc/ssl/certs/dlab.crt -subj "/C=US/ST=US/L=US/O=dlab/CN=localhost/subjectAltName={0}"'.format(server_external_ip))
-    local('sudo openssl pkcs12 -export -in /etc/ssl/certs/dlab.crt -inkey /etc/ssl/certs/dlab.key -name ssn -out /home/dlab-user/keys/ssn.p12 -password pass:{0}'.format(keystore_passwd))
-    local('sudo keytool -importkeystore -srckeystore /home/dlab-user/keys/ssn.p12 -srcstoretype PKCS12 -alias ssn -destkeystore /home/dlab-user/keys/ssn.keystore.jks -deststorepass {0} -srcstorepass {0}'.format(keystore_passwd))
-    local('sudo keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/dlab.crt -noprompt -storepass changeit -keystore /usr/lib/jvm/java-8-openjdk-amd64/jre/lib/security/cacerts')
-    local('sudo sed -i "s|KEYSTORE_PASSWORD|{}|g" /opt/dlab/conf/ssn.yml'.format(keystore_passwd))
+    local('sudo openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key -out /etc/ssl/certs/datalab.crt -subj "/C=US/ST=US/L=US/O=datalab/CN=localhost/subjectAltName={0}"'.format(server_external_ip))
+    local('sudo openssl pkcs12 -export -in /etc/ssl/certs/datalab.crt -inkey /etc/ssl/certs/datalab.key -name ssn -out /home/datalab-user/keys/ssn.p12 -password pass:{0}'.format(keystore_passwd))
+    local('sudo keytool -importkeystore -srckeystore /home/datalab-user/keys/ssn.p12 -srcstoretype PKCS12 -alias ssn -destkeystore /home/datalab-user/keys/ssn.keystore.jks -deststorepass {0} -srcstorepass {0}'.format(keystore_passwd))
+    local('sudo keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt -storepass changeit -keystore /usr/lib/jvm/java-8-openjdk-amd64/jre/lib/security/cacerts')
+    local('sudo sed -i "s|KEYSTORE_PASSWORD|{}|g" /opt/datalab/conf/ssn.yml'.format(keystore_passwd))
 
     print('Nginx configuration updating')
     local('sudo sed -i "s|SERVER_IP|{}|g" /etc/nginx/conf.d/nginx_proxy.conf'.format(server_external_ip))
@@ -140,8 +140,8 @@ if __name__ == "__main__":
     local('sudo supervisorctl restart all')
 
     print('Rebuilding docker images')
-    local('cd /opt/dlab/sources/infrastructure-provisioning/src/ && sudo docker-build all')
+    local('cd /opt/datalab/sources/infrastructure-provisioning/src/ && sudo docker-build all')
 
     print('[SUMMARY]')
-    print('Mongo password stored in /opt/dlab/conf/ssn.yml')
-    print('SSH key for dlab-user stored in /home/dlab-user/keys/KEY-FILE.pem')
\ No newline at end of file
+    print('Mongo password stored in /opt/datalab/conf/ssn.yml')
+    print('SSH key for datalab-user stored in /home/datalab-user/keys/KEY-FILE.pem')
\ No newline at end of file
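
The post-deployment hunks above do two things: they fill the renamed placeholder tokens (DATA_LAB_SBN, KEYCLOAK_*, and so on) into the YAML/INI templates via repeated `sed -i "s|TOKEN|value|g"` calls, and they chain the freshly generated self-signed certificate through three stores (openssl req writes the key/cert pair, openssl pkcs12 bundles them, and keytool converts the bundle into a JKS keystore and trusts the certificate in the JRE's cacerts). A minimal Python sketch of the substitution step; the helper name and the sample values are illustrative only, not part of the repository:

    def fill_placeholders(path, values):
        """Replace literal placeholder tokens in a template file,
        equivalent to the sed -i 's|TOKEN|value|g' calls above."""
        with open(path) as f:
            text = f.read()
        for token, value in values.items():
            text = text.replace(token, value)
        with open(path, 'w') as f:
            f.write(text)

    # Illustrative usage with tokens renamed by this commit (values are made up):
    fill_placeholders('/opt/datalab/conf/provisioning.yml', {
        'DATA_LAB_SBN': 'datalab-sbn',
        'KEYCLOAK_REALM_NAME': 'datalab',
    })
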
diff --git a/infrastructure-provisioning/src/base/scripts/install_prerequisites.py b/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
index e0c3c85..432dd2a 100644
--- a/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
+++ b/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
@@ -24,8 +24,8 @@
 from fabric.api import *
 import argparse
 import json
-from dlab.fab import *
-from dlab.common_lib import *
+from datalab.fab import *
+from datalab.common_lib import *
 from fabric.contrib.files import exists
 import sys
 import os
diff --git a/infrastructure-provisioning/src/base/scripts/install_user_key.py b/infrastructure-provisioning/src/base/scripts/install_user_key.py
index f079707..ae3bbdc 100644
--- a/infrastructure-provisioning/src/base/scripts/install_user_key.py
+++ b/infrastructure-provisioning/src/base/scripts/install_user_key.py
@@ -23,7 +23,7 @@
 
 from fabric.api import *
 from fabric.contrib.files import exists
-from dlab.fab import *
+from datalab.fab import *
 import argparse
 import json
 import sys
diff --git a/infrastructure-provisioning/src/dataengine-service/fabfile.py b/infrastructure-provisioning/src/dataengine-service/fabfile.py
index c21555b..ee1ce6b 100644
--- a/infrastructure-provisioning/src/dataengine-service/fabfile.py
+++ b/infrastructure-provisioning/src/dataengine-service/fabfile.py
@@ -24,9 +24,9 @@
 import json
 import time
 from fabric.api import *
-from dlab.fab import *
-from dlab.meta_lib import *
-from dlab.actions_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
+from datalab.actions_lib import *
 import sys
 import os
 import uuid
diff --git a/infrastructure-provisioning/src/dataengine/fabfile.py b/infrastructure-provisioning/src/dataengine/fabfile.py
index 319c6f1..6bd4947 100644
--- a/infrastructure-provisioning/src/dataengine/fabfile.py
+++ b/infrastructure-provisioning/src/dataengine/fabfile.py
@@ -24,9 +24,9 @@
 import json
 import time
 from fabric.api import *
-from dlab.fab import *
-from dlab.meta_lib import *
-from dlab.actions_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
+from datalab.actions_lib import *
 import sys
 import os
 import uuid
diff --git a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
index 2089613..0432d24 100644
--- a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
+++ b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
@@ -24,9 +24,9 @@
 import argparse
 import json
 import sys
-from dlab.notebook_lib import *
-from dlab.actions_lib import *
-from dlab.fab import *
+from datalab.notebook_lib import *
+from datalab.actions_lib import *
+from datalab.fab import *
 import os
 
 
diff --git a/infrastructure-provisioning/src/deeplearning/fabfile.py b/infrastructure-provisioning/src/deeplearning/fabfile.py
index cd9fbb5..c92efc2 100644
--- a/infrastructure-provisioning/src/deeplearning/fabfile.py
+++ b/infrastructure-provisioning/src/deeplearning/fabfile.py
@@ -24,9 +24,9 @@
 import logging
 import json
 import sys
-from dlab.fab import *
-from dlab.meta_lib import *
-from dlab.actions_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
+from datalab.actions_lib import *
 import os
 import uuid
 
diff --git a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
index b2e93d0..27bd5f1 100644
--- a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
+++ b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
@@ -21,10 +21,10 @@
 #
 # ******************************************************************************
 
-from dlab.actions_lib import *
-from dlab.common_lib import *
-from dlab.notebook_lib import *
-from dlab.fab import *
+from datalab.actions_lib import *
+from datalab.common_lib import *
+from datalab.notebook_lib import *
+from datalab.fab import *
 from fabric.api import *
 from fabric.contrib.files import exists
 import argparse
diff --git a/infrastructure-provisioning/src/edge/fabfile.py b/infrastructure-provisioning/src/edge/fabfile.py
index 66a656b..7260ad6 100644
--- a/infrastructure-provisioning/src/edge/fabfile.py
+++ b/infrastructure-provisioning/src/edge/fabfile.py
@@ -26,7 +26,7 @@ from fabric.api import *
 import logging
 import sys
 import os
-from dlab.fab import *
+from datalab.fab import *
 import traceback
 
 
diff --git a/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py b/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
index bad50ad..56861b8 100644
--- a/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
+++ b/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
@@ -23,7 +23,7 @@
 
 from fabric.api import *
 from fabric.contrib.files import exists
-from dlab.edge_lib import configure_http_proxy_server
+from datalab.edge_lib import configure_http_proxy_server
 import argparse
 import json
 import sys
diff --git a/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py b/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
index b7123a3..4b4de0d 100644
--- a/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
+++ b/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
@@ -26,7 +26,7 @@ from fabric.api import *
 import argparse
 import sys
 import os
-from dlab.edge_lib import install_nginx_ldap
+from datalab.edge_lib import install_nginx_ldap
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
diff --git a/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py b/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
index b853af5..a19eb67 100644
--- a/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
+++ b/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
@@ -25,9 +25,9 @@ import os
 import sys
 import logging
 import traceback
-from dlab.fab import *
-from dlab.meta_lib import *
-from dlab.actions_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
+from datalab.actions_lib import *
 from fabric.api import *
 import multiprocessing
 
diff --git a/infrastructure-provisioning/src/edge/templates/squid.conf b/infrastructure-provisioning/src/edge/templates/squid.conf
index 0129e00..9d45c44 100644
--- a/infrastructure-provisioning/src/edge/templates/squid.conf
+++ b/infrastructure-provisioning/src/edge/templates/squid.conf
@@ -21,7 +21,7 @@
 
 auth_param basic program LDAP_AUTH_PATH -b "LDAP_DN" -D "LDAP_SERVICE_USERNAME,LDAP_DN" -w LDAP_SERVICE_PASSWORD -f uid=%s LDAP_HOST
 
-acl DLab_user_src_subnet src PROXY_SUBNET
+acl DataLab_user_src_subnet src PROXY_SUBNET
 VPC_CIDRS
 ALLOWED_CIDRS
 
@@ -45,7 +45,7 @@ acl ldap-auth proxy_auth EDGE_USER_NAME
 http_access deny !Safe_ports
 http_access allow localhost manager
 http_access deny manager
-http_access allow DLab_user_src_subnet
+http_access allow DataLab_user_src_subnet
 http_access allow AllowedCIDRS ldap-auth
 http_access allow localhost
 http_access deny all
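
In the squid template the ACL definition and the http_access rule that references it are renamed in the same pair of hunks: squid refuses a configuration whose http_access rules name an undefined ACL, so DLab_user_src_subnet -> DataLab_user_src_subnet has to change in both places at once. An illustrative consistency check for that invariant (a hypothetical helper, not part of the repository):

    import re

    def undefined_acls(conf_text):
        """Return ACL names referenced by http_access rules but never defined."""
        defined = set(re.findall(r'(?m)^acl\s+(\S+)', conf_text))
        used = set()
        for rule in re.findall(r'(?m)^http_access\s+(?:allow|deny)\s+(.+)$', conf_text):
            used.update(token.lstrip('!') for token in rule.split())
        # 'all', 'localhost' and 'manager' are assumed to be predefined,
        # as in squid 3.2+; older releases define them explicitly.
        return used - defined - {'all', 'localhost', 'manager'}
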
diff --git a/infrastructure-provisioning/src/general/api/check_inactivity.py b/infrastructure-provisioning/src/general/api/check_inactivity.py
index 31d7bb3..bddd150 100644
--- a/infrastructure-provisioning/src/general/api/check_inactivity.py
+++ b/infrastructure-provisioning/src/general/api/check_inactivity.py
@@ -49,7 +49,7 @@ if __name__ == "__main__":
     except:
         reply['response']['result'] = {"error": "Failed to open result.json"}
 
-    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['project_name'],
                                                                           os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/configure.py b/infrastructure-provisioning/src/general/api/configure.py
index 4d6cc5f..ada70b2 100644
--- a/infrastructure-provisioning/src/general/api/configure.py
+++ b/infrastructure-provisioning/src/general/api/configure.py
@@ -48,7 +48,7 @@ if __name__ == "__main__":
             reply['response']['result'] = json.loads(f.read())
     except:
         reply['response']['result'] = {"error": "Failed to open result.json"}
-    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['project_name'],
                                                                           os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/create.py b/infrastructure-provisioning/src/general/api/create.py
index b2437b0..352e06b 100644
--- a/infrastructure-provisioning/src/general/api/create.py
+++ b/infrastructure-provisioning/src/general/api/create.py
@@ -55,7 +55,7 @@ if __name__ == "__main__":
         with open("/response/{}.json".format(os.environ['request_id']), 'w') as response_file:
             response_file.write(json.dumps(reply))
     else:
-        reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+        reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                               os.environ['project_name'],
                                                                               os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/create_image.py b/infrastructure-provisioning/src/general/api/create_image.py
index 425cf26..501adf4 100644
--- a/infrastructure-provisioning/src/general/api/create_image.py
+++ b/infrastructure-provisioning/src/general/api/create_image.py
@@ -49,7 +49,7 @@ if __name__ == "__main__":
     except:
         reply['response']['result'] = {"error": "Failed to open result.json"}
 
-    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['project_name'],
                                                                           os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/git_creds.py b/infrastructure-provisioning/src/general/api/git_creds.py
index 4edd370..c785fa0 100644
--- a/infrastructure-provisioning/src/general/api/git_creds.py
+++ b/infrastructure-provisioning/src/general/api/git_creds.py
@@ -49,7 +49,7 @@ if __name__ == "__main__":
     except:
         reply['response']['result'] = {"error": "Failed to open result.json"}
 
-    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['project_name'],
                                                                           os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/install_libs.py b/infrastructure-provisioning/src/general/api/install_libs.py
index 21489a9..823dc4b 100644
--- a/infrastructure-provisioning/src/general/api/install_libs.py
+++ b/infrastructure-provisioning/src/general/api/install_libs.py
@@ -49,7 +49,7 @@ if __name__ == "__main__":
     except:
         reply['response']['result'] = {"error": "Failed to open result.json"}
 
-    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['project_name'],
                                                                           os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/list_libs.py b/infrastructure-provisioning/src/general/api/list_libs.py
index d7c97a9..9438fe0 100644
--- a/infrastructure-provisioning/src/general/api/list_libs.py
+++ b/infrastructure-provisioning/src/general/api/list_libs.py
@@ -49,11 +49,11 @@ if __name__ == "__main__":
     except:
         reply['response']['result'] = {"error": "Failed to open result.json"}
 
-    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['project_name'],
                                                                           os.environ['request_id'])
 
-    reply['response']['result']['file'] = "/opt/dlab/tmp/result/{0}_{1}_{2}_all_pkgs.json".format(os.environ['project_name'],
+    reply['response']['result']['file'] = "/opt/datalab/tmp/result/{0}_{1}_{2}_all_pkgs.json".format(os.environ['project_name'],
                                                                                                   os.environ['application'],
                                                                                                   os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/reconfigure_spark.py b/infrastructure-provisioning/src/general/api/reconfigure_spark.py
index 09139ca..dd98f44 100644
--- a/infrastructure-provisioning/src/general/api/reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/api/reconfigure_spark.py
@@ -49,7 +49,7 @@ if __name__ == "__main__":
     except:
         reply['response']['result'] = {"error": "Failed to open result.json"}
 
-    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['project_name'],
                                                                           os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/recreate.py b/infrastructure-provisioning/src/general/api/recreate.py
index e91e579..a51535a 100644
--- a/infrastructure-provisioning/src/general/api/recreate.py
+++ b/infrastructure-provisioning/src/general/api/recreate.py
@@ -55,7 +55,7 @@ if __name__ == "__main__":
         with open("/response/{}.json".format(os.environ['request_id']), 'w') as response_file:
             response_file.write(json.dumps(reply))
     else:
-        reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+        reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                               os.environ['project_name'],
                                                                               os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/reupload_key.py b/infrastructure-provisioning/src/general/api/reupload_key.py
index d4af110..f8b4e28 100644
--- a/infrastructure-provisioning/src/general/api/reupload_key.py
+++ b/infrastructure-provisioning/src/general/api/reupload_key.py
@@ -51,7 +51,7 @@ if __name__ == "__main__":
 
 
 
-    log = "/var/log/dlab/edge/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    log = "/var/log/datalab/edge/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['project_name'],
                                                                           os.environ['request_id'])
     try:
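
Every API handler touched in this series renames the same log-path template; note that it reuses the first positional argument for both the directory and the file-name prefix, so the single dlab -> datalab change in the format string moves both at once. A quick illustration with made-up values:

    >>> "/var/log/datalab/{0}/{0}_{1}_{2}.log".format("edge", "project1", "42")
    '/var/log/datalab/edge/edge_project1_42.log'
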
diff --git a/infrastructure-provisioning/src/general/api/start.py b/infrastructure-provisioning/src/general/api/start.py
index 469d5a6..0522363 100644
--- a/infrastructure-provisioning/src/general/api/start.py
+++ b/infrastructure-provisioning/src/general/api/start.py
@@ -48,7 +48,7 @@ if __name__ == "__main__":
             reply['response']['result'] = json.loads(f.read())
     except:
         reply['response']['result'] = {"error": "Failed to open result.json"}
-    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['project_name'],
                                                                           os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/status.py b/infrastructure-provisioning/src/general/api/status.py
index 0b007b9..70b7961 100644
--- a/infrastructure-provisioning/src/general/api/status.py
+++ b/infrastructure-provisioning/src/general/api/status.py
@@ -48,7 +48,7 @@ if __name__ == "__main__":
             reply['response']['result'] = json.loads(f.read())
     except:
         reply['response']['result'] = {"error": "Failed to open result.json"}
-    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['edge_user_name'],
                                                                           os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/stop.py b/infrastructure-provisioning/src/general/api/stop.py
index 8dc2a11..26468af 100644
--- a/infrastructure-provisioning/src/general/api/stop.py
+++ b/infrastructure-provisioning/src/general/api/stop.py
@@ -49,7 +49,7 @@ if __name__ == "__main__":
     except:
         reply['response']['result'] = {"error": "Failed to open result.json"}
 
-    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['project_name'],
                                                                           os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/terminate.py b/infrastructure-provisioning/src/general/api/terminate.py
index 933af27..dde8fc5 100644
--- a/infrastructure-provisioning/src/general/api/terminate.py
+++ b/infrastructure-provisioning/src/general/api/terminate.py
@@ -55,7 +55,7 @@ if __name__ == "__main__":
         with open("/response/{}.json".format(os.environ['request_id']), 'w') as response_file:
             response_file.write(json.dumps(reply))
     else:
-        reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+        reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                               os.environ['project_name'],
                                                                               os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/api/terminate_image.py b/infrastructure-provisioning/src/general/api/terminate_image.py
index 0700686..aea7934 100644
--- a/infrastructure-provisioning/src/general/api/terminate_image.py
+++ b/infrastructure-provisioning/src/general/api/terminate_image.py
@@ -49,7 +49,7 @@ if __name__ == "__main__":
     except:
         reply['response']['result'] = {"error": "Failed to open result.json"}
 
-    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+    reply['response']['log'] = "/var/log/datalab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
                                                                           os.environ['project_name'],
                                                                           os.environ['request_id'])
 
diff --git a/infrastructure-provisioning/src/general/conf/dlab.ini b/infrastructure-provisioning/src/general/conf/datalab.ini
similarity index 89%
rename from infrastructure-provisioning/src/general/conf/dlab.ini
rename to infrastructure-provisioning/src/general/conf/datalab.ini
index c59d343..a6ff888 100644
--- a/infrastructure-provisioning/src/general/conf/dlab.ini
+++ b/infrastructure-provisioning/src/general/conf/datalab.ini
@@ -19,18 +19,18 @@
 ##
 ## ******************************************************************************
 
-#---- List of all dlab parameters (commented ones are passing from UI/Jenkins) ----#
+#---- List of all Data Lab parameters (commented ones are passed from UI/Jenkins) ----#
 
 
 #--- [conf] section contains all common for all templates parameters ---#
 [conf]
 ### Unique infrastructure name
 # service_base_name =
-### DLAB ssh user name ('dlab-user' by default)
-os_user = dlab-user
-### OS that supported by dlab (debian/redhat)
+### Data Lab ssh user name ('datalab-user' by default)
+os_user = datalab-user
+### OS supported by Data Lab (debian/redhat)
 # os_family =
-### Cloud provider that supported by dlab (aws/azure)
+### Cloud provider supported by Data Lab (aws/azure)
 # cloud_provider =
 ### Admin ssh key name in cloud provider
 # key_name =
@@ -43,7 +43,7 @@ lifecycle_stage = dev
 ### Pypi mirror for China
 pypi_mirror = pypi.doubanio.com
 ### Name of own GitLab SSL certificate
-gitlab_certfile = dlab-gitlab.crt
+gitlab_certfile = datalab-gitlab.crt
 ### Enable or Disable creating image at first time
 image_enabled = true
 ###Enable or Disable shared images
@@ -58,7 +58,7 @@ duo_vpc_enable = false
 # user_subnets_range =
 ### Comma-separated CIDR of IPs which will have access to SSN and Edge nodes
 allowed_ip_cidr = '0.0.0.0/0'
-### Type of network. Define in which network DLab will be deployed. Possible options: public|private
+### Type of network. Defines in which network Data Lab will be deployed. Possible options: public|private
 network_type = public
 ### Additional tags in format 'Key1:Value1;Key2:Value2'
 # additional_tags =
@@ -66,7 +66,7 @@ pip_version = 20.1
 ### Billing tag key
 billing_tag_key = product
 ### Billing tag value
-billing_tag_value = dlab
+billing_tag_value = datalab
 ### Enable or disable Step certificates
 stepcerts_enabled = false
 ### Step root certificate in base64 format
@@ -100,7 +100,7 @@ private_subnet_prefix = 24
 # subnet_id =
 ### Id of the subnet for notebooks and compute engines
 # subnet2_id =
-### Id of the vpc for whole dlab provisioning
+### Id of the vpc for whole Data Lab provisioning
 # vpc_id =
 ### Id of the secondary vpc for notebooks and compute engines
 # vpc2_id =
@@ -114,13 +114,13 @@ private_subnet_prefix = 24
 ssn_instance_size = t2.large
 ### EC2 instance type for EDGE
 edge_instance_size = t2.medium
-### Amazon region name for whole dlab provisioning
+### Amazon region name for whole Data Lab provisioning
 region = us-west-2
 ### Amazon zone letter for ssn, edge and notebook subnet provisioning
 # zone =
-### Amazon ami name based on debian conf_os_family for all dlab instances
+### Amazon ami name based on debian conf_os_family for all Data Lab instances
 debian_image_name = ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20200112
-### Amazon ami name based on RedHat conf_os_family for all dlab instances
+### Amazon ami name based on RedHat conf_os_family for all Data Lab instances
 redhat_image_name = RHEL-7.4_HVM-20180103-x86_64-2-Hourly2-GP2
 ### Amazon account ID
 # account_id =
@@ -150,9 +150,9 @@ edge_instance_size = Standard_DS1_v2
 # dataengine_master_size =
 ### Slave node size for Data Engine
 # dataengine_slave_size =
-### Azure image name based on debian conf_os_family for all dlab instances
+### Azure image name based on debian conf_os_family for all Data Lab instances
 debian_image_name = Canonical_UbuntuServer_18.04-LTS
-### Azure image name based on RedHat conf_os_family for all dlab instances
+### Azure image name based on RedHat conf_os_family for all Data Lab instances
 redhat_image_name = RedHat_RHEL_7.3
 ### Azure AD user name
 # user_name =
@@ -178,19 +178,19 @@ datalake_enable = false
 # project_id =
 ### Full path to service account JSON
 # service_account_path =
-### Name of the vpc for whole DLab provisioning
+### Name of the vpc for whole Data Lab provisioning
 # vpc_name =
 ### Name of the subnet for SSN and EDGE provisioning
 # subnet_name =
 ### Names of the firewall rules for SSN
 # firewall_rules =
-### GCP region name for whole dlab provisioning
+### GCP region name for whole Data Lab provisioning
 region = us-west1
-### GCP zone name for whole dlab provisioning
+### GCP zone name for whole Data Lab provisioning
 zone = us-west1-a
-### GCP ami name based on debian conf_os_family for all dlab instances
+### GCP ami name based on debian conf_os_family for all Data Lab instances
 debian_image_name = /projects/ubuntu-os-cloud/global/images/ubuntu-1804-bionic-v20200317
-### GCP ami name based on RedHat conf_os_family for all dlab instances
+### GCP ami name based on RedHat conf_os_family for all Data Lab instances
 redhat_image_name =
 ### Prefix of the private subnet
 ### Instance type for EDGE
@@ -202,8 +202,8 @@ gpu_accelerator_type = nvidia-tesla-k80
 
 #--- [ssn] section contains all parameters that are using for self-service node provisioning ---#
 [ssn]
-### System path on SSN instance where dlab will be installed
-dlab_path = /opt/dlab/
+### System path on SSN instance where Data Lab will be installed
+datalab_path = /opt/datalab/
 ### Elastic IP which will be associated with SSN node
 # elastic_ip =
 ### Version of Docker to be installed on SSN
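
datalab.ini above carries the renamed defaults (os_user = datalab-user, datalab_path = /opt/datalab/) that overwrite.ini and the UI/Jenkins parameters are layered on top of. A minimal sketch of reading the renamed keys; Python 3 configparser is shown for brevity, while the templates themselves still target the Python 2.7 image:

    from configparser import ConfigParser

    # interpolation=None: treat values as literal strings, no %-expansion
    cfg = ConfigParser(interpolation=None)
    cfg.read('infrastructure-provisioning/src/general/conf/datalab.ini')

    print(cfg.get('conf', 'os_user'))      # datalab-user
    print(cfg.get('ssn', 'datalab_path'))  # /opt/datalab/
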
diff --git a/infrastructure-provisioning/src/general/files/aws/base_Dockerfile b/infrastructure-provisioning/src/general/files/aws/base_Dockerfile
index e298f18..95eda49 100644
--- a/infrastructure-provisioning/src/general/files/aws/base_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/base_Dockerfile
@@ -58,16 +58,16 @@ RUN mkdir -p /root/conf && \
     mkdir -p /root/scripts && \
     mkdir -p /root/templates && \
     mkdir -p /root/files && \
-    mkdir -p /usr/lib/python2.7/dlab && \
+    mkdir -p /usr/lib/python2.7/datalab && \
     mkdir -p /root/keys/.ssh
 
 COPY ${SRC_PATH}base/ /root
 COPY ${SRC_PATH}general/conf/* /root/conf/
 COPY ${SRC_PATH}general/api/*.py /bin/
 COPY ${SRC_PATH}general/scripts/aws/common_* /root/scripts/
-COPY ${SRC_PATH}general/lib/aws/* /usr/lib/python2.7/dlab/
-COPY ${SRC_PATH}general/lib/os/${OS}/common_lib.py /usr/lib/python2.7/dlab/common_lib.py
-COPY ${SRC_PATH}general/lib/os/fab.py /usr/lib/python2.7/dlab/fab.py
+COPY ${SRC_PATH}general/lib/aws/* /usr/lib/python2.7/datalab/
+COPY ${SRC_PATH}general/lib/os/${OS}/common_lib.py /usr/lib/python2.7/datalab/common_lib.py
+COPY ${SRC_PATH}general/lib/os/fab.py /usr/lib/python2.7/datalab/fab.py
 COPY ${SRC_PATH}general/files/os/${OS}/sources.list /root/files/
 COPY ${SRC_PATH}general/files/os/ivysettings.xml /root/templates/
 COPY ${SRC_PATH}general/files/os/local_endpoint.json /root/files/
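
The base image now stages the shared helper modules under /usr/lib/python2.7/datalab/ instead of /usr/lib/python2.7/dlab/, which is what lets every fabfile and script in this commit switch to `from datalab.fab import *`-style imports. Since every template image below builds FROM docker.datalab-base:latest, the base image must be rebuilt before the per-template ones (the `docker-build all` step in the post-deployment script covers that). A sketch of the mechanical import rewrite applied across the tree, as a hypothetical helper rather than the migration script actually used:

    import os
    import re

    def rename_imports(root):
        """Rewrite 'from dlab...' / 'import dlab...' statements to 'datalab'."""
        pattern = re.compile(r'\b(from|import)\s+dlab(\.|\b)')
        for dirpath, _, files in os.walk(root):
            for name in files:
                if not name.endswith('.py'):
                    continue
                path = os.path.join(dirpath, name)
                with open(path) as f:
                    text = f.read()
                new = pattern.sub(lambda m: m.group(1) + ' datalab' + m.group(2), text)
                if new != text:
                    with open(path, 'w') as f:
                        f.write(new)
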
diff --git a/infrastructure-provisioning/src/general/files/aws/base_description.json b/infrastructure-provisioning/src/general/files/aws/base_description.json
index 1f39be3..a166fd3 100644
--- a/infrastructure-provisioning/src/general/files/aws/base_description.json
+++ b/infrastructure-provisioning/src/general/files/aws/base_description.json
@@ -1,4 +1,4 @@
 {
-  "template_name": "DLab AWS Base",
+  "template_name": "Data Lab AWS Base",
   "description": "Base image with aws tools"
 }
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/files/aws/dataengine-service_Dockerfile b/infrastructure-provisioning/src/general/files/aws/dataengine-service_Dockerfile
index 6e0c7bc..9a2296c 100644
--- a/infrastructure-provisioning/src/general/files/aws/dataengine-service_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/dataengine-service_Dockerfile
@@ -19,7 +19,7 @@
 #
 # ******************************************************************************
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
@@ -30,8 +30,8 @@ COPY general/scripts/os/common_configure_proxy.py /root/scripts/
 COPY general/scripts/os/install_additional_libs.py /root/scripts/install_additional_libs.py
 COPY general/scripts/os/get_list_available_pkgs.py /root/scripts/get_list_available_pkgs.py
 COPY general/scripts/os/common_* /root/scripts/
-COPY general/lib/os/redhat/common_lib.py /usr/lib/python2.7/dlab/common_lib.py
-COPY general/lib/os/redhat/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/redhat/common_lib.py /usr/lib/python2.7/datalab/common_lib.py
+COPY general/lib/os/redhat/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/inactive.sh /root/templates/
 COPY general/templates/os/inactive.service /root/templates/
 COPY general/templates/os/inactive.timer /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/aws/dataengine_Dockerfile b/infrastructure-provisioning/src/general/files/aws/dataengine_Dockerfile
index 313ddd2..0277816 100644
--- a/infrastructure-provisioning/src/general/files/aws/dataengine_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/dataengine_Dockerfile
@@ -19,7 +19,7 @@
 #
 # ******************************************************************************
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
@@ -29,7 +29,7 @@ COPY general/scripts/os/update_inactivity_on_start.py /root/scripts/
 COPY general/scripts/os/reconfigure_spark.py /root/scripts/
 COPY general/scripts/os/install_additional_libs.py /root/scripts/install_additional_libs.py
 COPY general/scripts/os/get_list_available_pkgs.py /root/scripts/get_list_available_pkgs.py
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/scripts/os/common_* /root/scripts/
 COPY general/scripts/aws/dataengine_* /root/scripts/
 COPY general/scripts/os/notebook_reconfigure_dataengine_spark.py /root/scripts/
diff --git a/infrastructure-provisioning/src/general/files/aws/deeplearning_Dockerfile b/infrastructure-provisioning/src/general/files/aws/deeplearning_Dockerfile
index 587a1b6..ce863e8 100644
--- a/infrastructure-provisioning/src/general/files/aws/deeplearning_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/deeplearning_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY deeplearning/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/aws/deeplearning_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/jupyter-notebook.service /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/aws/edge_Dockerfile b/infrastructure-provisioning/src/general/files/aws/edge_Dockerfile
index 143eee6..7289921 100644
--- a/infrastructure-provisioning/src/general/files/aws/edge_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/edge_Dockerfile
@@ -20,13 +20,13 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY edge/ /root/
 COPY general/scripts/aws/edge_* /root/scripts/
-COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/dlab/edge_lib.py
+COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/datalab/edge_lib.py
 COPY general/templates/aws/edge_s3_policy.json /root/templates/edge_s3_policy.json
 
 RUN chmod a+x /root/fabfile.py; \
diff --git a/infrastructure-provisioning/src/general/files/aws/jupyter_Dockerfile b/infrastructure-provisioning/src/general/files/aws/jupyter_Dockerfile
index a2d6198..108e4cb 100644
--- a/infrastructure-provisioning/src/general/files/aws/jupyter_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/jupyter_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY jupyter/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/aws/jupyter_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/jupyter-notebook.service /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/aws/jupyterlab_Dockerfile b/infrastructure-provisioning/src/general/files/aws/jupyterlab_Dockerfile
index 592955e..7daaa5c 100644
--- a/infrastructure-provisioning/src/general/files/aws/jupyterlab_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/jupyterlab_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY jupyterlab/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/aws/jupyter* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
 COPY general/templates/os/pyspark_local_template.json /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/aws/project_Dockerfile b/infrastructure-provisioning/src/general/files/aws/project_Dockerfile
index 0c23ae0..b2baad7 100644
--- a/infrastructure-provisioning/src/general/files/aws/project_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/project_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY project/ /root/
 COPY general/scripts/aws/project_* /root/scripts/
 COPY general/scripts/aws/edge_* /root/scripts/
-COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/dlab/edge_lib.py
+COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/datalab/edge_lib.py
 COPY general/templates/aws/edge_s3_policy.json /root/templates/edge_s3_policy.json
 COPY general/templates/os/manage_step_certs.sh /root/templates/
 COPY general/templates/os/step-cert-manager.service /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/aws/rstudio_Dockerfile b/infrastructure-provisioning/src/general/files/aws/rstudio_Dockerfile
index aafe294..abd2282 100644
--- a/infrastructure-provisioning/src/general/files/aws/rstudio_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/rstudio_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY rstudio/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/aws/rstudio_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
 COPY general/templates/os/inactive_rs.sh /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/aws/ssn_Dockerfile b/infrastructure-provisioning/src/general/files/aws/ssn_Dockerfile
index aeef12b..ce3e3cf 100644
--- a/infrastructure-provisioning/src/general/files/aws/ssn_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/ssn_Dockerfile
@@ -19,13 +19,13 @@
 #
 # ******************************************************************************
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY infrastructure-provisioning/src/ssn/ /root/
 COPY infrastructure-provisioning/src/general/scripts/aws/ssn_* /root/scripts/
-COPY infrastructure-provisioning/src/general/lib/os/${OS}/ssn_lib.py /usr/lib/python2.7/dlab/ssn_lib.py
+COPY infrastructure-provisioning/src/general/lib/os/${OS}/ssn_lib.py /usr/lib/python2.7/datalab/ssn_lib.py
 COPY infrastructure-provisioning/src/general/files/aws/ssn_policy.json /root/files/
 COPY infrastructure-provisioning/src/general/templates/aws/jenkins_jobs /root/templates/jenkins_jobs
 COPY infrastructure-provisioning/src/general/templates/os/manage_step_certs.sh /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/aws/ssn_description.json b/infrastructure-provisioning/src/general/files/aws/ssn_description.json
index 695e22f..be0d779 100644
--- a/infrastructure-provisioning/src/general/files/aws/ssn_description.json
+++ b/infrastructure-provisioning/src/general/files/aws/ssn_description.json
@@ -1,4 +1,4 @@
 {
-  "template_name": "DLab AWS SelfService Node and infra",
+  "template_name": "Data Lab AWS SelfService Node and infra",
   "description": "placeholder"
 }
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_Dockerfile b/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_Dockerfile
index 7efbb3c..d24e1aa 100644
--- a/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY tensor-rstudio/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/aws/tensor-rstudio_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/aws/tensor_Dockerfile b/infrastructure-provisioning/src/general/files/aws/tensor_Dockerfile
index 091168d..22eecb9 100644
--- a/infrastructure-provisioning/src/general/files/aws/tensor_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/tensor_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY tensor/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/aws/tensor_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/jupyter-notebook.service /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/aws/zeppelin_Dockerfile b/infrastructure-provisioning/src/general/files/aws/zeppelin_Dockerfile
index 6c0705f..c6fd9ef 100644
--- a/infrastructure-provisioning/src/general/files/aws/zeppelin_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/zeppelin_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY zeppelin/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/aws/zeppelin_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/aws/interpreter_livy.json /root/templates/
 COPY general/templates/aws/interpreter_spark.json /root/templates/
 COPY general/templates/aws/dataengine-service_interpreter_* /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/azure/base_Dockerfile b/infrastructure-provisioning/src/general/files/azure/base_Dockerfile
index 883225d..3306c22 100644
--- a/infrastructure-provisioning/src/general/files/azure/base_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/base_Dockerfile
@@ -58,16 +58,16 @@ RUN mkdir -p /root/conf && \
     mkdir -p /root/scripts && \
     mkdir -p /root/templates && \
     mkdir -p /root/files && \
-    mkdir -p /usr/lib/python2.7/dlab && \
+    mkdir -p /usr/lib/python2.7/datalab && \
     mkdir -p /root/keys/.ssh
 
 COPY ${SRC_PATH}base/ /root
 COPY ${SRC_PATH}general/conf/* /root/conf/
 COPY ${SRC_PATH}general/api/*.py /bin/
 COPY ${SRC_PATH}general/scripts/azure/common_* /root/scripts/
-COPY ${SRC_PATH}general/lib/azure/* /usr/lib/python2.7/dlab/
-COPY ${SRC_PATH}general/lib/os/${OS}/common_lib.py /usr/lib/python2.7/dlab/common_lib.py
-COPY ${SRC_PATH}general/lib/os/fab.py /usr/lib/python2.7/dlab/fab.py
+COPY ${SRC_PATH}general/lib/azure/* /usr/lib/python2.7/datalab/
+COPY ${SRC_PATH}general/lib/os/${OS}/common_lib.py /usr/lib/python2.7/datalab/common_lib.py
+COPY ${SRC_PATH}general/lib/os/fab.py /usr/lib/python2.7/datalab/fab.py
 COPY ${SRC_PATH}general/files/os/${OS}/sources.list /root/files/
 COPY ${SRC_PATH}general/files/os/ivysettings.xml /root/templates/
 COPY ${SRC_PATH}general/files/os/local_endpoint.json /root/files/
diff --git a/infrastructure-provisioning/src/general/files/azure/base_description.json b/infrastructure-provisioning/src/general/files/azure/base_description.json
index 63e884c..3c37048 100644
--- a/infrastructure-provisioning/src/general/files/azure/base_description.json
+++ b/infrastructure-provisioning/src/general/files/azure/base_description.json
@@ -1,4 +1,4 @@
 {
-  "template_name": "DLab Azure Base",
+  "template_name": "Data Lab Azure Base",
   "description": "Base image with aws tools"
 }
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/files/azure/dataengine_Dockerfile b/infrastructure-provisioning/src/general/files/azure/dataengine_Dockerfile
index daefa72..4ed38a9 100644
--- a/infrastructure-provisioning/src/general/files/azure/dataengine_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/dataengine_Dockerfile
@@ -19,12 +19,12 @@
 #
 # ******************************************************************************
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY dataengine/ /root/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/scripts/os/dataengine_* /root/scripts/
 COPY general/scripts/os/update_inactivity_on_start.py /root/scripts/
 COPY general/scripts/os/reconfigure_spark.py /root/scripts/
diff --git a/infrastructure-provisioning/src/general/files/azure/deeplearning_Dockerfile b/infrastructure-provisioning/src/general/files/azure/deeplearning_Dockerfile
index 641734b..6323abe 100644
--- a/infrastructure-provisioning/src/general/files/azure/deeplearning_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/deeplearning_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY deeplearning/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/azure/deeplearning_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/jupyter-notebook.service /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/azure/edge_Dockerfile b/infrastructure-provisioning/src/general/files/azure/edge_Dockerfile
index 5ad24cd..6ee207c 100644
--- a/infrastructure-provisioning/src/general/files/azure/edge_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/edge_Dockerfile
@@ -20,13 +20,13 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY edge/ /root/
 COPY general/scripts/azure/edge_* /root/scripts/
-COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/dlab/edge_lib.py
+COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/datalab/edge_lib.py
 
 RUN chmod a+x /root/fabfile.py; \
     chmod a+x /root/scripts/*
diff --git a/infrastructure-provisioning/src/general/files/azure/jupyter_Dockerfile b/infrastructure-provisioning/src/general/files/azure/jupyter_Dockerfile
index 8a39401..85c1c44 100644
--- a/infrastructure-provisioning/src/general/files/azure/jupyter_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/jupyter_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY jupyter/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/azure/jupyter_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/jupyter-notebook.service /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/azure/jupyterlab_Dockerfile b/infrastructure-provisioning/src/general/files/azure/jupyterlab_Dockerfile
index 0ff6810..b2f392f 100644
--- a/infrastructure-provisioning/src/general/files/azure/jupyterlab_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/jupyterlab_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY jupyterlab/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/azure/jupyter* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/jupyter-notebook.service /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/azure/project_Dockerfile b/infrastructure-provisioning/src/general/files/azure/project_Dockerfile
index 823becc..a60747b 100644
--- a/infrastructure-provisioning/src/general/files/azure/project_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/project_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY project/ /root/
 COPY general/scripts/azure/project_* /root/scripts/
 COPY general/scripts/azure/edge_* /root/scripts/
-COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/dlab/edge_lib.py
+COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/datalab/edge_lib.py
 COPY general/templates/os/manage_step_certs.sh /root/templates/
 COPY general/templates/os/step-cert-manager.service /root/templates/
 
diff --git a/infrastructure-provisioning/src/general/files/azure/rstudio_Dockerfile b/infrastructure-provisioning/src/general/files/azure/rstudio_Dockerfile
index dc77e72..8286ac1 100644
--- a/infrastructure-provisioning/src/general/files/azure/rstudio_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/rstudio_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY rstudio/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/azure/rstudio_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
 COPY general/templates/os/inactive_rs.sh /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/azure/ssn_Dockerfile b/infrastructure-provisioning/src/general/files/azure/ssn_Dockerfile
index ee9be75..f257af0 100644
--- a/infrastructure-provisioning/src/general/files/azure/ssn_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/ssn_Dockerfile
@@ -19,13 +19,13 @@
 #
 # ******************************************************************************
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY infrastructure-provisioning/src/ssn/ /root/
 COPY infrastructure-provisioning/src/general/scripts/azure/ssn_* /root/scripts/
-COPY infrastructure-provisioning/src/general/lib/os/${OS}/ssn_lib.py /usr/lib/python2.7/dlab/ssn_lib.py
+COPY infrastructure-provisioning/src/general/lib/os/${OS}/ssn_lib.py /usr/lib/python2.7/datalab/ssn_lib.py
 COPY infrastructure-provisioning/src/general/templates/azure/jenkins_jobs /root/templates/jenkins_jobs
 COPY infrastructure-provisioning/src/general/templates/os/manage_step_certs.sh /root/templates/
 COPY infrastructure-provisioning/src/general/templates/os/step-cert-manager.service /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/azure/ssn_description.json b/infrastructure-provisioning/src/general/files/azure/ssn_description.json
index 61a3d59..29a21d3 100644
--- a/infrastructure-provisioning/src/general/files/azure/ssn_description.json
+++ b/infrastructure-provisioning/src/general/files/azure/ssn_description.json
@@ -1,4 +1,4 @@
 {
-  "template_name": "DLab Azure SelfService Node and infra",
+  "template_name": "Data Lab Azure SelfService Node and infra",
   "description": "placeholder"
 }
\ No newline at end of file
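
The same three substitutions repeat across every Dockerfile and description file in this commit: the base image tag (docker.dlab-base --> docker.datalab-base), the python2.7 library path, and the template_name. A hypothetical one-off checker, not part of this commit, for tokens the rename may have missed:

    #!/usr/bin/env python
    # Hypothetical helper (not part of this commit): walk the source tree
    # and report leftover dlab/DLab/DLAB tokens in Dockerfiles and
    # description files. \b keeps "datalab" from matching.
    import os
    import re
    import sys

    STALE = re.compile(r'\b(dlab|DLab|DLAB)\b')

    def find_stale(root):
        for dirpath, _, files in os.walk(root):
            for name in files:
                if name.endswith(('_Dockerfile', '.json')):
                    path = os.path.join(dirpath, name)
                    with open(path) as f:
                        for lineno, line in enumerate(f, 1):
                            if STALE.search(line):
                                yield path, lineno, line.rstrip()

    if __name__ == '__main__':
        root = sys.argv[1] if len(sys.argv) > 1 else '.'
        for path, lineno, line in find_stale(root):
            print('{0}:{1}: {2}'.format(path, lineno, line))
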
diff --git a/infrastructure-provisioning/src/general/files/azure/tensor_Dockerfile b/infrastructure-provisioning/src/general/files/azure/tensor_Dockerfile
index 2e52b1c..56aa7c5 100644
--- a/infrastructure-provisioning/src/general/files/azure/tensor_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/tensor_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY tensor/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/azure/tensor_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/jupyter-notebook.service /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/azure/zeppelin_Dockerfile b/infrastructure-provisioning/src/general/files/azure/zeppelin_Dockerfile
index d86ebb7..c9b0181 100644
--- a/infrastructure-provisioning/src/general/files/azure/zeppelin_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/zeppelin_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY zeppelin/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/azure/zeppelin_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/azure/interpreter_livy.json /root/templates/
 COPY general/templates/azure/interpreter_spark.json /root/templates/
 COPY general/templates/os/dataengine_interpreter_livy.json /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/gcp/base_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/base_Dockerfile
index d8f6447..17409be 100644
--- a/infrastructure-provisioning/src/general/files/gcp/base_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/base_Dockerfile
@@ -58,16 +58,16 @@ RUN mkdir -p /root/conf && \
     mkdir -p /root/scripts && \
     mkdir -p /root/templates && \
     mkdir -p /root/files && \
-    mkdir -p /usr/lib/python2.7/dlab && \
+    mkdir -p /usr/lib/python2.7/datalab && \
     mkdir -p /root/keys/.ssh
 
 COPY ${SRC_PATH}base/ /root
 COPY ${SRC_PATH}general/conf/* /root/conf/
 COPY ${SRC_PATH}general/api/*.py /bin/
 COPY ${SRC_PATH}general/scripts/gcp/common_* /root/scripts/
-COPY ${SRC_PATH}general/lib/gcp/* /usr/lib/python2.7/dlab/
-COPY ${SRC_PATH}general/lib/os/${OS}/common_lib.py /usr/lib/python2.7/dlab/common_lib.py
-COPY ${SRC_PATH}general/lib/os/fab.py /usr/lib/python2.7/dlab/fab.py
+COPY ${SRC_PATH}general/lib/gcp/* /usr/lib/python2.7/datalab/
+COPY ${SRC_PATH}general/lib/os/${OS}/common_lib.py /usr/lib/python2.7/datalab/common_lib.py
+COPY ${SRC_PATH}general/lib/os/fab.py /usr/lib/python2.7/datalab/fab.py
 COPY ${SRC_PATH}general/files/os/${OS}/sources.list /root/files/
 COPY ${SRC_PATH}general/files/os/ivysettings.xml /root/templates/
 COPY ${SRC_PATH}general/files/os/local_endpoint.json /root/files/
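
The GCP base image now creates /usr/lib/python2.7/datalab and copies the cloud and OS libraries there. A hypothetical smoke test, not part of this commit, to run inside the built image and confirm the renamed package path resolves:

    # Hypothetical smoke test: confirm the move from
    # /usr/lib/python2.7/dlab to /usr/lib/python2.7/datalab left every
    # module importable under the new package name.
    import importlib

    for mod in ('datalab.fab', 'datalab.common_lib', 'datalab.meta_lib',
                'datalab.actions_lib'):
        try:
            importlib.import_module(mod)
            print('{0}: OK'.format(mod))
        except ImportError as err:
            print('{0}: missing ({1})'.format(mod, err))
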
diff --git a/infrastructure-provisioning/src/general/files/gcp/base_description.json b/infrastructure-provisioning/src/general/files/gcp/base_description.json
index 7773772..cf4e901 100644
--- a/infrastructure-provisioning/src/general/files/gcp/base_description.json
+++ b/infrastructure-provisioning/src/general/files/gcp/base_description.json
@@ -1,4 +1,4 @@
 {
-  "template_name": "DLab GCP Base",
+  "template_name": "Data Lab GCP Base",
   "description": "Base image with gcp tools"
 }
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/files/gcp/dataengine-service_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/dataengine-service_Dockerfile
index 00cad0d..0e5bf50 100644
--- a/infrastructure-provisioning/src/general/files/gcp/dataengine-service_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/dataengine-service_Dockerfile
@@ -19,14 +19,14 @@
 #
 # ******************************************************************************
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY dataengine-service/fabfile.py /root/
 COPY dataengine-service/description.json /root/
 COPY general/scripts/gcp/dataengine-service_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/scripts/os/common_* /root/scripts/
 COPY general/scripts/os/install_additional_libs.py /root/scripts/install_additional_libs.py
 COPY general/scripts/os/get_list_available_pkgs.py /root/scripts/get_list_available_pkgs.py
diff --git a/infrastructure-provisioning/src/general/files/gcp/dataengine_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/dataengine_Dockerfile
index 139e565..60ba4c6 100644
--- a/infrastructure-provisioning/src/general/files/gcp/dataengine_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/dataengine_Dockerfile
@@ -19,7 +19,7 @@
 #
 # ******************************************************************************
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
@@ -29,7 +29,7 @@ COPY general/scripts/os/update_inactivity_on_start.py /root/scripts/
 COPY general/scripts/os/reconfigure_spark.py /root/scripts/
 COPY general/scripts/os/install_additional_libs.py /root/scripts/install_additional_libs.py
 COPY general/scripts/os/get_list_available_pkgs.py /root/scripts/get_list_available_pkgs.py
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/scripts/os/notebook_reconfigure_dataengine_spark.py /root/scripts/
 COPY general/scripts/os/common_* /root/scripts/
 COPY general/scripts/gcp/dataengine_* /root/scripts/
diff --git a/infrastructure-provisioning/src/general/files/gcp/deeplearning_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/deeplearning_Dockerfile
index cfce2f6..7fb9053 100644
--- a/infrastructure-provisioning/src/general/files/gcp/deeplearning_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/deeplearning_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY deeplearning/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/gcp/deeplearning_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/jupyter-notebook.service /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/gcp/edge_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/edge_Dockerfile
index f60f409..56e2aa1 100644
--- a/infrastructure-provisioning/src/general/files/gcp/edge_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/edge_Dockerfile
@@ -20,13 +20,13 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY edge/ /root/
 COPY general/scripts/gcp/edge_* /root/scripts/
-COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/dlab/edge_lib.py
+COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/datalab/edge_lib.py
 COPY general/files/gcp/ps_policy.json /root/files/ps_policy.json
 COPY general/files/gcp/ps_roles.json /root/files/ps_roles.json
 
diff --git a/infrastructure-provisioning/src/general/files/gcp/jupyter_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/jupyter_Dockerfile
index bd4b1bd..1791d79 100644
--- a/infrastructure-provisioning/src/general/files/gcp/jupyter_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/jupyter_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY jupyter/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/gcp/jupyter_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/jupyter-notebook.service /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/gcp/jupyterlab_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/jupyterlab_Dockerfile
index 9c30f94..d9a2da2 100644
--- a/infrastructure-provisioning/src/general/files/gcp/jupyterlab_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/jupyterlab_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY jupyterlab/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/gcp/jupyter* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
 COPY general/templates/os/pyspark_local_template.json /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/gcp/project_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/project_Dockerfile
index 7fc44e5..502fcb2 100644
--- a/infrastructure-provisioning/src/general/files/gcp/project_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/project_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY project/ /root/
 COPY general/scripts/gcp/project_* /root/scripts/
 COPY general/scripts/gcp/edge_* /root/scripts/
-COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/dlab/edge_lib.py
+COPY general/lib/os/${OS}/edge_lib.py /usr/lib/python2.7/datalab/edge_lib.py
 COPY general/files/gcp/ps_policy.json /root/files/ps_policy.json
 COPY general/files/gcp/ps_roles.json /root/files/ps_roles.json
 COPY general/templates/os/manage_step_certs.sh /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/gcp/rstudio_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/rstudio_Dockerfile
index 0a07470..c54a690 100644
--- a/infrastructure-provisioning/src/general/files/gcp/rstudio_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/rstudio_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY rstudio/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/gcp/rstudio_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
 COPY general/templates/gcp/core-site.xml /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/gcp/ssn_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/ssn_Dockerfile
index 902f48c..af553e4 100644
--- a/infrastructure-provisioning/src/general/files/gcp/ssn_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/ssn_Dockerfile
@@ -19,13 +19,13 @@
 #
 # ******************************************************************************
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY infrastructure-provisioning/src/ssn/ /root/
 COPY infrastructure-provisioning/src/general/scripts/gcp/ssn_* /root/scripts/
-COPY infrastructure-provisioning/src/general/lib/os/${OS}/ssn_lib.py /usr/lib/python2.7/dlab/ssn_lib.py
+COPY infrastructure-provisioning/src/general/lib/os/${OS}/ssn_lib.py /usr/lib/python2.7/datalab/ssn_lib.py
 COPY infrastructure-provisioning/src/general/files/gcp/ssn_policy.json /root/files/
 COPY infrastructure-provisioning/src/general/files/gcp/ssn_roles.json /root/files/
 COPY infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs /root/templates/jenkins_jobs
diff --git a/infrastructure-provisioning/src/general/files/gcp/ssn_description.json b/infrastructure-provisioning/src/general/files/gcp/ssn_description.json
index 8193ad8..3dbd206 100644
--- a/infrastructure-provisioning/src/general/files/gcp/ssn_description.json
+++ b/infrastructure-provisioning/src/general/files/gcp/ssn_description.json
@@ -1,4 +1,4 @@
 {
-  "template_name": "DLab GCP SelfService Node and infra",
+  "template_name": "Data Lab GCP SelfService Node and infra",
   "description": "placeholder"
 }
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/files/gcp/superset_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/superset_Dockerfile
index 224482c..5423ef0 100644
--- a/infrastructure-provisioning/src/general/files/gcp/superset_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/superset_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY superset/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/gcp/superset_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/superset-notebook.service /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 
diff --git a/infrastructure-provisioning/src/general/files/gcp/tensor-rstudio_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/tensor-rstudio_Dockerfile
index 3f609b4..9f5c74c 100644
--- a/infrastructure-provisioning/src/general/files/gcp/tensor-rstudio_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/tensor-rstudio_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY tensor-rstudio/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/gcp/tensor-rstudio_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/gcp/tensor_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/tensor_Dockerfile
index 28cd54e..93ef83d 100644
--- a/infrastructure-provisioning/src/general/files/gcp/tensor_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/tensor_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY tensor/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/gcp/tensor_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/os/${OS}/jupyter-notebook.service /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/gcp/zeppelin_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/zeppelin_Dockerfile
index ab5b22e..c95dc6c 100644
--- a/infrastructure-provisioning/src/general/files/gcp/zeppelin_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/zeppelin_Dockerfile
@@ -20,14 +20,14 @@
 # ******************************************************************************
 
 
-FROM docker.dlab-base:latest
+FROM docker.datalab-base:latest
 
 ARG OS
 
 COPY zeppelin/ /root/
 COPY general/scripts/os/* /root/scripts/
 COPY general/scripts/gcp/zeppelin_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/datalab/notebook_lib.py
 COPY general/templates/gcp/interpreter_livy.json /root/templates/
 COPY general/templates/gcp/interpreter_spark.json /root/templates/
 COPY general/templates/os/dataengine_interpreter_livy.json /root/templates/
diff --git a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
index 638ba1d..c322342 100644
--- a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
@@ -31,12 +31,12 @@ import json
 from fabric.api import *
 from fabric.contrib.files import exists
 import logging
-from dlab.meta_lib import *
-from dlab.fab import *
+from datalab.meta_lib import *
+from datalab.fab import *
 import traceback
 import urllib2
 import meta_lib
-import dlab.fab
+import datalab.fab
 import uuid
 import ast
 
@@ -1291,7 +1291,7 @@ def remove_kernels(emr_name, tag_name, nb_tag_value, ssh_user, key_path, emr_ver
                     sudo('rm -rf /home/{}/.ensure_dir/dataengine-service_{}_interpreter_ensured'.format(ssh_user,
                                                                                                         emr_name))
                 if exists('/home/{}/.ensure_dir/rstudio_dataengine-service_ensured'.format(ssh_user)):
-                    dlab.fab.remove_rstudio_dataengines_kernel(computational_name, ssh_user)
+                    datalab.fab.remove_rstudio_dataengines_kernel(computational_name, ssh_user)
                 sudo('rm -rf  /opt/' + emr_version + '/' + emr_name + '/')
                 print("Notebook's {} kernels were removed".format(env.hosts))
         else:
@@ -1656,12 +1656,12 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
                                                            '/tmp/notebook_spark-defaults_local.conf')
         sudo('\cp -f /tmp/notebook_spark-defaults_local.conf /opt/spark/conf/spark-defaults.conf')
         if memory_type == 'driver':
-            spark_memory = dlab.fab.get_spark_memory()
+            spark_memory = datalab.fab.get_spark_memory()
             sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
             sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
                                                                                               spark_memory))
         if 'spark_configurations' in os.environ:
-            dlab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
+            datalab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
             new_spark_defaults = list()
             spark_defaults = sudo('cat /opt/spark/conf/spark-defaults.conf')
@@ -1677,7 +1677,7 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
                                     new_spark_defaults.append(property + ' ' + config['Properties'][property])
                     new_spark_defaults.append(param)
             new_spark_defaults = set(new_spark_defaults)
-            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(dlab_header))
+            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(datalab_header))
             for prop in new_spark_defaults:
                 prop = prop.rstrip()
                 sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
@@ -1831,7 +1831,7 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
         local('cp -f /tmp/{0}/notebook_spark-defaults_local.conf  {1}spark/conf/spark-defaults.conf'.format(cluster_name,
                                                                                                         cluster_dir))
     if spark_configs and os.path.exists('{0}'.format(cluster_dir)):
-        dlab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
+        datalab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
                             capture=True)
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
@@ -1848,7 +1848,7 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
                                 new_spark_defaults.append(property + ' ' + config['Properties'][property])
                 new_spark_defaults.append(param)
         new_spark_defaults = set(new_spark_defaults)
-        local("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(dlab_header, cluster_dir))
+        local("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(datalab_header, cluster_dir))
         for prop in new_spark_defaults:
             prop = prop.rstrip()
             local('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir))
@@ -1905,7 +1905,7 @@ def remove_dataengine_kernels(tag_name, notebook_name, os_user, key_path, cluste
             sudo('sleep 5')
             sudo('rm -rf /home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name))
         if exists('/home/{}/.ensure_dir/rstudio_dataengine_ensured'.format(os_user)):
-            dlab.fab.remove_rstudio_dataengines_kernel(os.environ['computational_name'], os_user)
+            datalab.fab.remove_rstudio_dataengines_kernel(os.environ['computational_name'], os_user)
         sudo('rm -rf  /opt/' + cluster_name + '/')
         print("Notebook's {} kernels were removed".format(env.hosts))
     except Exception as err:
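
The dlab_header --> datalab_header rename above touches the spark-defaults merge in configure_local_spark() and configure_dataengine_spark(). A standalone sketch of that merge, simplified to local strings (the real code shells out through fabric), assuming the same spark_configurations shape:

    # Sketch of the spark-defaults merge: keep the template's comment
    # header (datalab_header) on top, then overlay user-supplied
    # 'spark-defaults' properties onto the existing config lines.
    import ast

    def merge_spark_defaults(datalab_header, current_properties, raw_configs):
        spark_configurations = ast.literal_eval(raw_configs)
        new_spark_defaults = set()
        for param in current_properties:
            if param and param.split(' ')[0] != '#':
                for config in spark_configurations:
                    if config.get('Classification') == 'spark-defaults':
                        for prop, value in config['Properties'].items():
                            if param.split(' ')[0] == prop:
                                param = '{0} {1}'.format(prop, value)  # override
                            else:
                                new_spark_defaults.add('{0} {1}'.format(prop, value))
                new_spark_defaults.add(param.rstrip())
        return [datalab_header] + sorted(new_spark_defaults)

    print(merge_spark_defaults(
        '# Data Lab notebook spark-defaults',
        ['spark.driver.memory 2048m', 'spark.jars /opt/jars/a.jar'],
        "[{'Classification': 'spark-defaults',"
        " 'Properties': {'spark.driver.memory': '4096m'}}]"))
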
diff --git a/infrastructure-provisioning/src/general/lib/aws/meta_lib.py b/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
index c852ffc..36d6bcc 100644
--- a/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
@@ -29,7 +29,7 @@ import sys
 import backoff
 import random
 import string
-from dlab.fab import *
+from datalab.fab import *
 import actions_lib
 
 
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index 2e97003..ed914fd 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -41,8 +41,8 @@ import logging
 import traceback
 import sys, time
 import os, json
-import dlab.fab
-import dlab.common_lib
+import datalab.fab
+import datalab.common_lib
 import ast
 
 
@@ -493,7 +493,7 @@ class AzureActions:
                                    file=sys.stdout)}))
             traceback.print_exc(file=sys.stdout)
 
-    def create_instance(self, region, instance_size, service_base_name, instance_name, dlab_ssh_user_name, public_key,
+    def create_instance(self, region, instance_size, service_base_name, instance_name, datalab_ssh_user_name, public_key,
                         network_interface_resource_id, resource_group_name, primary_disk_size, instance_type,
                         image_full_name, tags, project_name='', create_option='fromImage', disk_id='',
                         instance_storage_account_type='Premium_LRS', image_type='default'):
@@ -532,12 +532,12 @@ class AzureActions:
                     },
                     'os_profile': {
                         'computer_name': instance_name.replace('_', '-'),
-                        'admin_username': dlab_ssh_user_name,
+                        'admin_username': datalab_ssh_user_name,
                         'linux_configuration': {
                             'disable_password_authentication': True,
                             'ssh': {
                                 'public_keys': [{
-                                    'path': '/home/{}/.ssh/authorized_keys'.format(dlab_ssh_user_name),
+                                    'path': '/home/{}/.ssh/authorized_keys'.format(datalab_ssh_user_name),
                                     'key_data': public_key
                                 }]
                             }
@@ -580,12 +580,12 @@ class AzureActions:
                         },
                         'os_profile': {
                             'computer_name': instance_name.replace('_', '-'),
-                            'admin_username': dlab_ssh_user_name,
+                            'admin_username': datalab_ssh_user_name,
                             'linux_configuration': {
                                 'disable_password_authentication': True,
                                 'ssh': {
                                     'public_keys': [{
-                                        'path': '/home/{}/.ssh/authorized_keys'.format(dlab_ssh_user_name),
+                                        'path': '/home/{}/.ssh/authorized_keys'.format(datalab_ssh_user_name),
                                         'key_data': public_key
                                     }]
                                 }
@@ -687,12 +687,12 @@ class AzureActions:
                     'storage_profile': storage_profile,
                     'os_profile': {
                         'computer_name': instance_name.replace('_', '-'),
-                        'admin_username': dlab_ssh_user_name,
+                        'admin_username': datalab_ssh_user_name,
                         'linux_configuration': {
                             'disable_password_authentication': True,
                             'ssh': {
                                 'public_keys': [{
-                                    'path': '/home/{}/.ssh/authorized_keys'.format(dlab_ssh_user_name),
+                                    'path': '/home/{}/.ssh/authorized_keys'.format(datalab_ssh_user_name),
                                     'key_data': public_key
                                 }]
                             }
@@ -751,12 +751,12 @@ class AzureActions:
                     'storage_profile': storage_profile,
                     'os_profile': {
                         'computer_name': instance_name.replace('_', '-'),
-                        'admin_username': dlab_ssh_user_name,
+                        'admin_username': datalab_ssh_user_name,
                         'linux_configuration': {
                             'disable_password_authentication': True,
                             'ssh': {
                                 'public_keys': [{
-                                    'path': '/home/{}/.ssh/authorized_keys'.format(dlab_ssh_user_name),
+                                    'path': '/home/{}/.ssh/authorized_keys'.format(datalab_ssh_user_name),
                                     'key_data': public_key
                                 }]
                             }
@@ -998,7 +998,7 @@ class AzureActions:
                 sudo('sleep 5')
                 sudo('rm -rf /home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name))
             if exists('/home/{}/.ensure_dir/rstudio_dataengine_ensured'.format(os_user)):
-                dlab.fab.remove_rstudio_dataengines_kernel(os.environ['computational_name'], os_user)
+                datalab.fab.remove_rstudio_dataengines_kernel(os.environ['computational_name'], os_user)
             sudo('rm -rf  /opt/' + cluster_name + '/')
             print("Notebook's {} kernels were removed".format(env.hosts))
         except Exception as err:
@@ -1122,7 +1122,7 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
               /tmp/notebook_spark-defaults_local.conf".format(jars_dir))
         sudo('\cp -f /tmp/notebook_spark-defaults_local.conf /opt/spark/conf/spark-defaults.conf')
         if memory_type == 'driver':
-            spark_memory = dlab.fab.get_spark_memory()
+            spark_memory = datalab.fab.get_spark_memory()
             sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
             sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
                                                                                               spark_memory))
@@ -1131,7 +1131,7 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
         java_home = run("update-alternatives --query java | grep -o --color=never \'/.*/java-8.*/jre\'").splitlines()[0]
         sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/spark/conf/spark-env.sh".format(java_home))
         if 'spark_configurations' in os.environ:
-            dlab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
+            datalab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
             new_spark_defaults = list()
             spark_defaults = sudo('cat /opt/spark/conf/spark-defaults.conf')
@@ -1147,7 +1147,7 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
                                     new_spark_defaults.append(property + ' ' + config['Properties'][property])
                     new_spark_defaults.append(param)
             new_spark_defaults = set(new_spark_defaults)
-            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(dlab_header))
+            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(datalab_header))
             for prop in new_spark_defaults:
                 prop = prop.rstrip()
                 sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
@@ -1177,7 +1177,7 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
     else:
         local('cp -f /opt/hadoop/etc/hadoop/core-site.xml {}hadoop/etc/hadoop/core-site.xml'.format(cluster_dir))
     if spark_configs and os.path.exists('{0}'.format(cluster_dir)):
-        dlab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
+        datalab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
                             capture=True)
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
@@ -1194,7 +1194,7 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
                                 new_spark_defaults.append(property + ' ' + config['Properties'][property])
                 new_spark_defaults.append(param)
         new_spark_defaults = set(new_spark_defaults)
-        local("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(dlab_header, cluster_dir))
+        local("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(datalab_header, cluster_dir))
         for prop in new_spark_defaults:
             prop = prop.rstrip()
             local('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir))
@@ -1282,7 +1282,7 @@ def ensure_local_spark(os_user, spark_link, spark_version, hadoop_version, local
                 sudo('mv /opt/hadoop-{0} /opt/hadoop/'.format(hadoop_version))
                 sudo('chown -R {0}:{0} /opt/hadoop/'.format(os_user))
                 # Configuring Hadoop and Spark
-                java_path = dlab.common_lib.find_java_path_remote()
+                java_path = datalab.common_lib.find_java_path_remote()
                 sudo('echo "export JAVA_HOME={}" >> /opt/hadoop/etc/hadoop/hadoop-env.sh'.format(java_path))
                 sudo("""echo 'export HADOOP_CLASSPATH="$HADOOP_HOME/share/hadoop/tools/lib/*"' >> /opt/hadoop/etc/hadoop/hadoop-env.sh""")
                 sudo('echo "export HADOOP_HOME=/opt/hadoop/" >> /opt/spark/conf/spark-env.sh')
@@ -1318,7 +1318,7 @@ def install_dataengine_spark(cluster_name, spark_link, spark_version, hadoop_ver
             local('mv /opt/hadoop-{0} {1}hadoop/'.format(hadoop_version, cluster_dir))
             local('chown -R {0}:{0} {1}hadoop/'.format(os_user, cluster_dir))
             # Configuring Hadoop and Spark
-            java_path = dlab.common_lib.find_java_path_local()
+            java_path = datalab.common_lib.find_java_path_local()
             local('echo "export JAVA_HOME={}" >> {}hadoop/etc/hadoop/hadoop-env.sh'.format(java_path, cluster_dir))
             local("""echo 'export HADOOP_CLASSPATH="$HADOOP_HOME/share/hadoop/tools/lib/*"' >> {}hadoop/etc/hadoop/hadoop-env.sh""".format(cluster_dir))
             local('echo "export HADOOP_HOME={0}hadoop/" >> {0}spark/conf/spark-env.sh'.format(cluster_dir))
diff --git a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
index 91aba5c..6e081e5 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
@@ -25,7 +25,7 @@ from google.cloud import exceptions
 from google.cloud import storage
 from googleapiclient import errors
 import google.auth
-from dlab.fab import *
+from datalab.fab import *
 import meta_lib
 import os
 import json
@@ -35,8 +35,8 @@ import sys, time
 from Crypto.PublicKey import RSA
 from fabric.api import *
 import urllib2
-import dlab.fab
-import dlab.common_lib
+import datalab.fab
+import datalab.common_lib
 import backoff
 import ast
 import random
@@ -1180,7 +1180,7 @@ class GCPActions:
                 sudo('sleep 5')
                 sudo('rm -rf /home/{}/.ensure_dir/dataengine-service_{}_interpreter_ensured'.format(ssh_user, dataproc_name))
             if exists('/home/{}/.ensure_dir/rstudio_dataengine-service_ensured'.format(ssh_user)):
-                dlab.fab.remove_rstudio_dataengines_kernel(computational_name, ssh_user)
+                datalab.fab.remove_rstudio_dataengines_kernel(computational_name, ssh_user)
             sudo('rm -rf  /opt/{0}/{1}/'.format(dataproc_version, dataproc_name))
             print("Notebook's {} kernels were removed".format(env.hosts))
         except Exception as err:
@@ -1404,7 +1404,7 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
             sudo('echo \"spark.jars $(ls -1 ' + jars_dir + '* | tr \'\\n\' \',\')\" >> /tmp/notebook_spark-defaults_local.conf')
         sudo('\cp -f /tmp/notebook_spark-defaults_local.conf /opt/spark/conf/spark-defaults.conf')
         if memory_type == 'driver':
-            spark_memory = dlab.fab.get_spark_memory()
+            spark_memory = datalab.fab.get_spark_memory()
             sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
             sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
                                                                                               spark_memory))
@@ -1413,7 +1413,7 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
         java_home = run("update-alternatives --query java | grep -o --color=never \'/.*/java-8.*/jre\'").splitlines()[0]
         sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/spark/conf/spark-env.sh".format(java_home))
         if 'spark_configurations' in os.environ:
-            dlab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
+            datalab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
             new_spark_defaults = list()
             spark_defaults = sudo('cat /opt/spark/conf/spark-defaults.conf')
@@ -1429,7 +1429,7 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
                                     new_spark_defaults.append(property + ' ' + config['Properties'][property])
                     new_spark_defaults.append(param)
             new_spark_defaults = set(new_spark_defaults)
-            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(dlab_header))
+            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(datalab_header))
             for prop in new_spark_defaults:
                 prop = prop.rstrip()
                 sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
@@ -1492,7 +1492,7 @@ def remove_dataengine_kernels(notebook_name, os_user, key_path, cluster_name):
             sudo('sleep 5')
             sudo('rm -rf /home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name))
         if exists('/home/{}/.ensure_dir/rstudio_dataengine_ensured'.format(os_user)):
-            dlab.fab.remove_rstudio_dataengines_kernel(computational_name, os_user)
+            datalab.fab.remove_rstudio_dataengines_kernel(computational_name, os_user)
         sudo('rm -rf  /opt/' + cluster_name + '/')
         print("Notebook's {} kernels were removed".format(env.hosts))
     except Exception as err:
@@ -1525,7 +1525,7 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
                                                                                                         cluster_dir))
     local('cp -f /opt/spark/conf/core-site.xml {}spark/conf/'.format(cluster_dir))
     if spark_configs and os.path.exists('{0}'.format(cluster_dir)):
-        dlab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
+        datalab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
                             capture=True)
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
@@ -1542,7 +1542,7 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
                                 new_spark_defaults.append(property + ' ' + config['Properties'][property])
                 new_spark_defaults.append(param)
         new_spark_defaults = set(new_spark_defaults)
-        local("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(dlab_header, cluster_dir))
+        local("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(datalab_header, cluster_dir))
         for prop in new_spark_defaults:
             prop = prop.rstrip()
             local('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir))
diff --git a/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py b/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
index 6b7582b..104c9b4 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
@@ -24,7 +24,7 @@ from googleapiclient.discovery import build
 from google.cloud import storage
 from google.cloud import exceptions
 import google.auth
-from dlab.fab import *
+from datalab.fab import *
 import actions_lib
 import os, re
 from googleapiclient import errors
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py
index 6b0f5d6..20e74c1 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py
@@ -25,10 +25,10 @@ import os
 import sys
 from fabric.api import *
 from fabric.contrib.files import exists
-from dlab.common_lib import manage_pkg
-from dlab.common_lib import install_certbot
-from dlab.common_lib import run_certbot
-from dlab.common_lib import configure_nginx_LE
+from datalab.common_lib import manage_pkg
+from datalab.common_lib import install_certbot
+from datalab.common_lib import run_certbot
+from datalab.common_lib import configure_nginx_LE
 
 
 def configure_http_proxy_server(config):
@@ -86,7 +86,7 @@ def install_nginx_lua(edge_ip, nginx_version, keycloak_auth_server_url, keycloak
                      '--password-file /home/{2}/keys/provisioner_password {4} --output-file /tmp/step_token'.format(
                       os.environ['conf_stepcerts_kid'], os.environ['conf_stepcerts_ca_url'], user, cn, sans))
                 token = sudo('cat /tmp/step_token')
-                sudo('step ca certificate "{0}" /etc/ssl/certs/dlab.crt /etc/ssl/certs/dlab.key '
+                sudo('step ca certificate "{0}" /etc/ssl/certs/datalab.crt /etc/ssl/certs/datalab.key '
                      '--token "{1}" --kty=RSA --size 2048 --provisioner {2} '.format(cn, token,
                                                                                      os.environ['conf_stepcerts_kid']))
                 sudo('touch /var/log/renew_certificates.log')
@@ -94,8 +94,8 @@ def install_nginx_lua(edge_ip, nginx_version, keycloak_auth_server_url, keycloak
                 sudo('chmod +x /usr/local/bin/manage_step_certs.sh')
                 sudo('sed -i "s|STEP_ROOT_CERT_PATH|/etc/ssl/certs/root_ca.crt|g" '
                      '/usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_CERT_PATH|/etc/ssl/certs/dlab.crt|g" /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_KEY_PATH|/etc/ssl/certs/dlab.key|g" /usr/local/bin/manage_step_certs.sh')
+                sudo('sed -i "s|STEP_CERT_PATH|/etc/ssl/certs/datalab.crt|g" /usr/local/bin/manage_step_certs.sh')
+                sudo('sed -i "s|STEP_KEY_PATH|/etc/ssl/certs/datalab.key|g" /usr/local/bin/manage_step_certs.sh')
                 sudo('sed -i "s|STEP_CA_URL|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
                     os.environ['conf_stepcerts_ca_url']))
                 sudo('sed -i "s|RESOURCE_TYPE|edge|g" /usr/local/bin/manage_step_certs.sh')
@@ -112,8 +112,8 @@ def install_nginx_lua(edge_ip, nginx_version, keycloak_auth_server_url, keycloak
                 sudo('systemctl daemon-reload')
                 sudo('systemctl enable step-cert-manager.service')
             else:
-                sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/dlab.key \
-                     -out /etc/ssl/certs/dlab.crt -subj "/C=US/ST=US/L=US/O=dlab/CN={}"'.format(hostname))
+                sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key \
+                     -out /etc/ssl/certs/datalab.crt -subj "/C=US/ST=US/L=US/O=datalab/CN={}"'.format(hostname))
 
             sudo('mkdir -p /tmp/src')
             with cd('/tmp/src/'):
@@ -134,21 +134,21 @@ def install_nginx_lua(edge_ip, nginx_version, keycloak_auth_server_url, keycloak
 
             sudo('useradd -r nginx')
 
-            sudo('mkdir -p /opt/dlab/templates')
-            put('/root/templates', '/opt/dlab', use_sudo=True)
-            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/dlab/templates/conf.d/proxy.conf'.format(edge_ip))
-            sudo('sed -i \'s|KEYCLOAK_AUTH_URL|{}|g\' /opt/dlab/templates/conf.d/proxy.conf'.format(
+            sudo('mkdir -p /opt/datalab/templates')
+            put('/root/templates', '/opt/datalab', use_sudo=True)
+            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(edge_ip))
+            sudo('sed -i \'s|KEYCLOAK_AUTH_URL|{}|g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_auth_server_url))
-            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/dlab/templates/conf.d/proxy.conf'.format(
+            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_realm_name))
-            sudo('sed -i \'s/KEYCLOAK_CLIENT_ID/{}/g\' /opt/dlab/templates/conf.d/proxy.conf'.format(
+            sudo('sed -i \'s/KEYCLOAK_CLIENT_ID/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_client_id))
-            sudo('sed -i \'s/KEYCLOAK_CLIENT_SECRET/{}/g\' /opt/dlab/templates/conf.d/proxy.conf'.format(
+            sudo('sed -i \'s/KEYCLOAK_CLIENT_SECRET/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_client_secret))
 
-            sudo('cp /opt/dlab/templates/nginx.conf /usr/local/openresty/nginx/conf')
+            sudo('cp /opt/datalab/templates/nginx.conf /usr/local/openresty/nginx/conf')
             sudo('mkdir /usr/local/openresty/nginx/conf/conf.d')
-            sudo('cp /opt/dlab/templates/conf.d/proxy.conf /usr/local/openresty/nginx/conf/conf.d/')
+            sudo('cp /opt/datalab/templates/conf.d/proxy.conf /usr/local/openresty/nginx/conf/conf.d/')
             sudo('mkdir /usr/local/openresty/nginx/conf/locations')
             sudo('systemctl start openresty')
             sudo('touch /tmp/nginx_installed')
@@ -176,10 +176,10 @@ def configure_nftables(config):
             elif os.environ['conf_cloud_provider'] == 'gcp':
                 interface = 'ens4'
             sudo('sed -i \'s/#net.ipv4.ip_forward=1/net.ipv4.ip_forward=1/g\' /etc/sysctl.conf')
-            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/dlab/templates/nftables.conf'.format(config['edge_ip']))
-            sudo('sed -i "s|INTERFACE|{}|g" /opt/dlab/templates/nftables.conf'.format(interface))
-            sudo('sed -i "s|SUBNET_CIDR|{}|g" /opt/dlab/templates/nftables.conf'.format(config['exploratory_subnet']))
-            sudo('cp /opt/dlab/templates/nftables.conf /etc/')
+            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/nftables.conf'.format(config['edge_ip']))
+            sudo('sed -i "s|INTERFACE|{}|g" /opt/datalab/templates/nftables.conf'.format(interface))
+            sudo('sed -i "s|SUBNET_CIDR|{}|g" /opt/datalab/templates/nftables.conf'.format(config['exploratory_subnet']))
+            sudo('cp /opt/datalab/templates/nftables.conf /etc/')
             sudo('systemctl restart nftables')
             sudo('touch /tmp/nftables_ensured')
     except Exception as err:
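
The edge hunks above swap /opt/dlab/templates for /opt/datalab/templates in a series of sed substitutions. Expressed as plain Python, the same placeholder rendering looks roughly like this (function and sample values are hypothetical):

    # Plain-Python sketch of the sed-style placeholder substitution the
    # edge setup performs against /opt/datalab/templates.
    def render_template(template_path, out_path, values):
        with open(template_path) as f:
            text = f.read()
        for placeholder, value in values.items():
            text = text.replace(placeholder, value)
        with open(out_path, 'w') as f:
            f.write(text)

    render_template(
        '/opt/datalab/templates/conf.d/proxy.conf',
        '/usr/local/openresty/nginx/conf/conf.d/proxy.conf',
        {'EDGE_IP': '10.0.0.4',
         'KEYCLOAK_AUTH_URL': 'https://keycloak.example.com/auth',
         'KEYCLOAK_REALM_NAME': 'datalab',
         'KEYCLOAK_CLIENT_ID': 'edge-client',
         'KEYCLOAK_CLIENT_SECRET': '***'})
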
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
index ea1bfd5..8370585 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
@@ -28,9 +28,9 @@ import json
 import random
 import string
 import sys
-from dlab.notebook_lib import *
-from dlab.fab import *
-from dlab.common_lib import *
+from datalab.notebook_lib import *
+from datalab.fab import *
+from datalab.common_lib import *
 import backoff
 import os
 import re
@@ -125,7 +125,7 @@ def install_rstudio(os_user, local_spark_path, rstudio_pass, rstudio_version):
             sudo('chown {0}:{0} /mnt/var'.format(os_user))
             http_proxy = run('echo $http_proxy')
             https_proxy = run('echo $https_proxy')
-            sudo("sed -i '/Type=forking/a \Environment=USER=dlab-user' /etc/systemd/system/rstudio-server.service")
+            sudo("sed -i '/Type=forking/a \Environment=USER=datalab-user' /etc/systemd/system/rstudio-server.service")
             sudo("sed -i '/ExecStart/s|=/usr/lib/rstudio-server/bin/rserver|=/bin/bash -c \"export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64; /usr/lib/rstudio-server/bin/rserver --auth-none 1|g' /etc/systemd/system/rstudio-server.service")
             sudo("sed -i '/ExecStart/s|$|\"|g' /etc/systemd/system/rstudio-server.service")
             sudo(
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
index 0c38603..55315f6 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
@@ -24,18 +24,18 @@
 from fabric.api import *
 import crypt
 import yaml
-from dlab.fab import *
-from dlab.meta_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
 import os
 import json
 import traceback
 import sys
-from dlab.common_lib import manage_pkg
+from datalab.common_lib import manage_pkg
 
 
-def ensure_docker_daemon(dlab_path, os_user, region):
+def ensure_docker_daemon(datalab_path, os_user, region):
     try:
-        if not exists(dlab_path + 'tmp/docker_daemon_ensured'):
+        if not exists(datalab_path + 'tmp/docker_daemon_ensured'):
             docker_version = os.environ['ssn_docker_version']
             sudo('curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -')
             sudo('add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) \
@@ -46,43 +46,43 @@ def ensure_docker_daemon(dlab_path, os_user, region):
             sudo('usermod -a -G docker ' + os_user)
             sudo('update-rc.d docker defaults')
             sudo('update-rc.d docker enable')
-            sudo('touch ' + dlab_path + 'tmp/docker_daemon_ensured')
+            sudo('touch ' + datalab_path + 'tmp/docker_daemon_ensured')
         return True
     except:
         return False
 
 
-def ensure_nginx(dlab_path):
+def ensure_nginx(datalab_path):
     try:
-        if not exists(dlab_path + 'tmp/nginx_ensured'):
+        if not exists(datalab_path + 'tmp/nginx_ensured'):
             manage_pkg('-y install', 'remote', 'nginx')
             sudo('service nginx restart')
             sudo('update-rc.d nginx defaults')
             sudo('update-rc.d nginx enable')
-            sudo('touch ' + dlab_path + 'tmp/nginx_ensured')
+            sudo('touch ' + datalab_path + 'tmp/nginx_ensured')
     except Exception as err:
         traceback.print_exc()
         print('Failed to ensure Nginx: ', str(err))
         sys.exit(1)
 
 
-def ensure_jenkins(dlab_path):
+def ensure_jenkins(datalab_path):
     try:
-        if not exists(dlab_path + 'tmp/jenkins_ensured'):
+        if not exists(datalab_path + 'tmp/jenkins_ensured'):
             sudo('wget -q -O - https://pkg.jenkins.io/debian/jenkins-ci.org.key | apt-key add -')
             sudo('echo deb http://pkg.jenkins.io/debian-stable binary/ > /etc/apt/sources.list.d/jenkins.list')
             manage_pkg('-y update', 'remote', '')
             manage_pkg('-y install', 'remote', 'jenkins')
-            sudo('touch ' + dlab_path + 'tmp/jenkins_ensured')
+            sudo('touch ' + datalab_path + 'tmp/jenkins_ensured')
     except Exception as err:
         traceback.print_exc()
         print('Failed to ensure Jenkins: ', str(err))
         sys.exit(1)
 
 
-def configure_jenkins(dlab_path, os_user, config, tag_resource_id):
+def configure_jenkins(datalab_path, os_user, config, tag_resource_id):
     try:
-        if not exists(dlab_path + 'tmp/jenkins_configured'):
+        if not exists(datalab_path + 'tmp/jenkins_configured'):
             sudo('echo \'JENKINS_ARGS="--prefix=/jenkins --httpPort=8070"\' >> /etc/default/jenkins')
             sudo('rm -rf /var/lib/jenkins/*')
             sudo('mkdir -p /var/lib/jenkins/jobs/')
@@ -93,7 +93,7 @@ def configure_jenkins(dlab_path, os_user, config, tag_resource_id):
             sudo('/etc/init.d/jenkins stop; sleep 5')
             sudo('systemctl enable jenkins')
             sudo('systemctl start jenkins')
-            sudo('touch ' + dlab_path + '/tmp/jenkins_configured')
+            sudo('touch ' + datalab_path + '/tmp/jenkins_configured')
             sudo('echo "jenkins ALL = NOPASSWD:ALL" >> /etc/sudoers')
     except Exception as err:
         traceback.print_exc()
@@ -101,7 +101,7 @@ def configure_jenkins(dlab_path, os_user, config, tag_resource_id):
         sys.exit(1)
 
 
-def configure_nginx(config, dlab_path, hostname):
+def configure_nginx(config, datalab_path, hostname):
     try:
         random_file_part = id_generator(size=20)
         if not exists("/etc/nginx/conf.d/nginx_proxy.conf"):
@@ -110,10 +110,10 @@ def configure_nginx(config, dlab_path, hostname):
             put(config['nginx_template_dir'] + 'ssn_nginx.conf', '/tmp/nginx.conf')
             put(config['nginx_template_dir'] + 'nginx_proxy.conf', '/tmp/nginx_proxy.conf')
             sudo("sed -i 's|SSN_HOSTNAME|" + hostname + "|' /tmp/nginx_proxy.conf")
-            sudo('mv /tmp/nginx.conf ' + dlab_path + 'tmp/')
-            sudo('mv /tmp/nginx_proxy.conf ' + dlab_path + 'tmp/')
-            sudo('\cp ' + dlab_path + 'tmp/nginx.conf /etc/nginx/')
-            sudo('\cp ' + dlab_path + 'tmp/nginx_proxy.conf /etc/nginx/conf.d/')
+            sudo('mv /tmp/nginx.conf ' + datalab_path + 'tmp/')
+            sudo('mv /tmp/nginx_proxy.conf ' + datalab_path + 'tmp/')
+            sudo('\cp ' + datalab_path + 'tmp/nginx.conf /etc/nginx/')
+            sudo('\cp ' + datalab_path + 'tmp/nginx_proxy.conf /etc/nginx/conf.d/')
             sudo('mkdir -p /etc/nginx/locations')
             sudo('rm -f /etc/nginx/sites-enabled/default')
     except Exception as err:
@@ -130,8 +130,8 @@ def configure_nginx(config, dlab_path, hostname):
                     for line in tpl:
                         out.write(line)
             put("/tmp/%s-tmpproxy_location_jenkins_template.conf" % random_file_part, '/tmp/proxy_location_jenkins.conf')
-            sudo('mv /tmp/proxy_location_jenkins.conf ' + os.environ['ssn_dlab_path'] + 'tmp/')
-            sudo('\cp ' + os.environ['ssn_dlab_path'] + 'tmp/proxy_location_jenkins.conf /etc/nginx/locations/')
+            sudo('mv /tmp/proxy_location_jenkins.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
+            sudo('\cp ' + os.environ['ssn_datalab_path'] + 'tmp/proxy_location_jenkins.conf /etc/nginx/locations/')
             sudo("echo 'engineer:" + crypt.crypt(nginx_password, id_generator()) + "' > /etc/nginx/htpasswd")
             with open('jenkins_creds.txt', 'w+') as f:
                 f.write("Jenkins credentials: engineer  / " + nginx_password)
@@ -147,11 +147,11 @@ def configure_nginx(config, dlab_path, hostname):
 
 def ensure_supervisor():
     try:
-        if not exists(os.environ['ssn_dlab_path'] + 'tmp/superv_ensured'):
+        if not exists(os.environ['ssn_datalab_path'] + 'tmp/superv_ensured'):
             manage_pkg('-y install', 'remote', 'supervisor')
             sudo('update-rc.d supervisor defaults')
             sudo('update-rc.d supervisor enable')
-            sudo('touch ' + os.environ['ssn_dlab_path'] + 'tmp/superv_ensured')
+            sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/superv_ensured')
     except Exception as err:
         traceback.print_exc()
         print('Failed to install Supervisor: ', str(err))
@@ -160,31 +160,31 @@ def ensure_supervisor():
 
 def ensure_mongo():
     try:
-        if not exists(os.environ['ssn_dlab_path'] + 'tmp/mongo_ensured'):
+        if not exists(os.environ['ssn_datalab_path'] + 'tmp/mongo_ensured'):
             sudo('wget -qO - https://www.mongodb.org/static/pgp/server-4.2.asc | apt-key add -')
             sudo('ver=`lsb_release -cs`; echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu '
                  '$ver/mongodb-org/4.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.2.list; '
                  'apt-get update')
             manage_pkg('-y install', 'remote', 'mongodb-org')
             sudo('systemctl enable mongod.service')
-            sudo('touch ' + os.environ['ssn_dlab_path'] + 'tmp/mongo_ensured')
+            sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/mongo_ensured')
     except Exception as err:
         traceback.print_exc()
         print('Failed to install MongoDB: ', str(err))
         sys.exit(1)
 
-def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
+def start_ss(keyfile, host_string, datalab_conf_dir, web_path,
              os_user, mongo_passwd, keystore_passwd, cloud_provider,
              service_base_name, tag_resource_id, billing_tag, account_id, billing_bucket,
-             aws_job_enabled, dlab_path, billing_enabled, cloud_params,
+             aws_job_enabled, datalab_path, billing_enabled, cloud_params,
              authentication_file, offer_number, currency,
              locale, region_info, ldap_login, tenant_id,
              application_id, hostname, data_lake_name, subscription_id,
-             validate_permission_scope, dlab_id, usage_date, product,
+             validate_permission_scope, datalab_id, usage_date, product,
              usage_type, usage, cost, resource_id, tags, billing_dataset_name, keycloak_client_id,
              keycloak_client_secret, keycloak_auth_server_url, report_path=''):
     try:
-        if not exists(os.environ['ssn_dlab_path'] + 'tmp/ss_started'):
+        if not exists(os.environ['ssn_datalab_path'] + 'tmp/ss_started'):
             java_path = sudo("update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'")
             supervisor_conf = '/etc/supervisor/conf.d/supervisor_svc.conf'
             local('sed -i "s|MONGO_PASSWORD|{}|g" /root/templates/ssn.yml'.format(mongo_passwd))
@@ -192,16 +192,16 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
             local('sed -i "s|CLOUD_PROVIDER|{}|g" /root/templates/ssn.yml'.format(cloud_provider))
             local('sed -i "s|\${JRE_HOME}|' + java_path + '|g" /root/templates/ssn.yml')
             sudo('sed -i "s|KEYNAME|{}|g" {}/webapp/provisioning-service/conf/provisioning.yml'.
-                 format(os.environ['conf_key_name'], dlab_path))
+                 format(os.environ['conf_key_name'], datalab_path))
             put('/root/templates/ssn.yml', '/tmp/ssn.yml')
-            sudo('mv /tmp/ssn.yml ' + os.environ['ssn_dlab_path'] + 'conf/')
+            sudo('mv /tmp/ssn.yml ' + os.environ['ssn_datalab_path'] + 'conf/')
             put('/root/templates/proxy_location_webapp_template.conf', '/tmp/proxy_location_webapp_template.conf')
-            sudo('mv /tmp/proxy_location_webapp_template.conf ' + os.environ['ssn_dlab_path'] + 'tmp/')
+            sudo('mv /tmp/proxy_location_webapp_template.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
             if cloud_provider == 'aws':
-                conf_parameter_name = '--spring.config.location={0}billing_app.yml --conf '.format(dlab_conf_dir)
+                conf_parameter_name = '--spring.config.location={0}billing_app.yml --conf '.format(datalab_conf_dir)
                 with open('/root/templates/supervisor_svc.conf', 'r') as f:
                     text = f.read()
-                text = text.replace('WEB_CONF', dlab_conf_dir).replace('OS_USR', os_user)\
+                text = text.replace('WEB_CONF', datalab_conf_dir).replace('OS_USR', os_user)\
                     .replace('CONF_PARAMETER_NAME', conf_parameter_name)
                 with open('/root/templates/supervisor_svc.conf', 'w') as f:
                     f.write(text)
@@ -209,15 +209,15 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                 conf_parameter_name = '--spring.config.location='
                 with open('/root/templates/supervisor_svc.conf', 'r') as f:
                     text = f.read()
-                text = text.replace('WEB_CONF', dlab_conf_dir).replace('OS_USR', os_user)\
+                text = text.replace('WEB_CONF', datalab_conf_dir).replace('OS_USR', os_user)\
                     .replace('CONF_PARAMETER_NAME', conf_parameter_name)
                 with open('/root/templates/supervisor_svc.conf', 'w') as f:
                     f.write(text)
             put('/root/templates/supervisor_svc.conf', '/tmp/supervisor_svc.conf')
-            sudo('mv /tmp/supervisor_svc.conf ' + os.environ['ssn_dlab_path'] + 'tmp/')
-            sudo('cp ' + os.environ['ssn_dlab_path'] +
+            sudo('mv /tmp/supervisor_svc.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
+            sudo('cp ' + os.environ['ssn_datalab_path'] +
                  'tmp/proxy_location_webapp_template.conf /etc/nginx/locations/proxy_location_webapp.conf')
-            sudo('cp ' + os.environ['ssn_dlab_path'] + 'tmp/supervisor_svc.conf {}'.format(supervisor_conf))
+            sudo('cp ' + os.environ['ssn_datalab_path'] + 'tmp/supervisor_svc.conf {}'.format(supervisor_conf))
             sudo('sed -i \'s=WEB_APP_DIR={}=\' {}'.format(web_path, supervisor_conf))
             try:
                 sudo('mkdir -p /var/log/application')
@@ -225,7 +225,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                 for service in ['self-service', 'provisioning-service', 'billing']:
                     jar = sudo('cd {0}{1}/lib/; find {1}*.jar -type f'.format(web_path, service))
                     sudo('ln -s {0}{2}/lib/{1} {0}{2}/{2}.jar '.format(web_path, jar, service))
-                    sudo('cp {0}/webapp/{1}/conf/*.yml /tmp/yml_tmp/'.format(dlab_path, service))
+                    sudo('cp {0}/webapp/{1}/conf/*.yml /tmp/yml_tmp/'.format(datalab_path, service))
                 # Replacing Keycloak and cloud parameters
                 for item in json.loads(cloud_params):
                     if "KEYCLOAK_" in item['key']:
@@ -241,7 +241,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                     sudo('sed -i "s|<LOGIN_USE_LDAP>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(ldap_login))
                     sudo('sed -i "s|<LOGIN_TENANT_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(tenant_id))
                     sudo('sed -i "s|<LOGIN_APPLICATION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(application_id))
-                    sudo('sed -i "s|<DLAB_SUBSCRIPTION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(subscription_id))
+                    sudo('sed -i "s|<DATA_LAB_SUBSCRIPTION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(subscription_id))
                     sudo('sed -i "s|<MANAGEMENT_API_AUTH_FILE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
                         authentication_file))
                     sudo('sed -i "s|<VALIDATE_PERMISSION_SCOPE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
@@ -256,7 +256,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                     #     permission_scope = 'subscriptions/{}/resourceGroups/{}/providers/Microsoft.Authorization/'.format(
                     #         subscription_id, service_base_name
                     #     )
-                sudo('mv /tmp/yml_tmp/* ' + dlab_conf_dir)
+                sudo('mv /tmp/yml_tmp/* ' + datalab_conf_dir)
                 sudo('rmdir /tmp/yml_tmp/')
             except:
                 append_result("Unable to upload webapp jars")
@@ -273,13 +273,13 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                          '--aws_job_enabled {} ' \
                          '--report_path "{}" ' \
                          '--mongo_password {} ' \
-                         '--dlab_dir {} ' \
+                         '--datalab_dir {} ' \
                          '--authentication_file "{}" ' \
                          '--offer_number {} ' \
                          '--currency {} ' \
                          '--locale {} ' \
                          '--region_info {} ' \
-                         '--dlab_id {} ' \
+                         '--datalab_id {} ' \
                          '--usage_date {} ' \
                          '--product {} ' \
                          '--usage_type {} ' \
@@ -305,13 +305,13 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                                    aws_job_enabled,
                                    report_path,
                                    mongo_passwd,
-                                   dlab_path,
+                                   datalab_path,
                                    authentication_file,
                                    offer_number,
                                    currency,
                                    locale,
                                    region_info,
-                                   dlab_id,
+                                   datalab_id,
                                    usage_date,
                                    product,
                                    usage_type,
@@ -329,7 +329,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                 sudo('python /tmp/configure_billing.py {}'.format(params))
             try:
                 if os.environ['conf_stepcerts_enabled'] == 'true':
-                    sudo('openssl pkcs12 -export -in /etc/ssl/certs/dlab.crt -inkey /etc/ssl/certs/dlab.key -name ssn '
+                    sudo('openssl pkcs12 -export -in /etc/ssl/certs/datalab.crt -inkey /etc/ssl/certs/datalab.key -name ssn '
                          '-out ssn.p12 -password pass:{0}'.format(keystore_passwd))
                     sudo('keytool -importkeystore -srckeystore ssn.p12 -srcstoretype PKCS12 -alias ssn -destkeystore '
                          '/home/{0}/keys/ssn.keystore.jks -deststorepass "{1}" -srcstorepass "{1}"'.format(
@@ -339,15 +339,15 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                           os_user, keystore_passwd))
                     sudo('keytool -importcert -trustcacerts -alias step-ca -file /etc/ssl/certs/root_ca.crt '
                          '-noprompt -storepass changeit -keystore {1}/lib/security/cacerts'.format(os_user, java_path))
-                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/dlab.crt -noprompt '
+                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt '
                          '-storepass changeit -keystore {0}/lib/security/cacerts'.format(java_path))
                 else:
                     sudo('keytool -genkeypair -alias ssn -keyalg RSA -validity 730 -storepass {1} -keypass {1} \
                          -keystore /home/{0}/keys/ssn.keystore.jks -keysize 2048 -dname "CN=localhost"'.format(
                          os_user, keystore_passwd))
-                    sudo('keytool -exportcert -alias ssn -storepass {1} -file /etc/ssl/certs/dlab.crt \
+                    sudo('keytool -exportcert -alias ssn -storepass {1} -file /etc/ssl/certs/datalab.crt \
                          -keystore /home/{0}/keys/ssn.keystore.jks'.format(os_user, keystore_passwd))
-                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/dlab.crt -noprompt \
+                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt \
                          -storepass changeit -keystore {1}/lib/security/cacerts'.format(os_user, java_path))
             except:
                 append_result("Unable to generate cert and copy to java keystore")
@@ -355,7 +355,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
             sudo('service supervisor start')
             sudo('service nginx restart')
             sudo('service supervisor restart')
-            sudo('touch ' + os.environ['ssn_dlab_path'] + 'tmp/ss_started')
+            sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/ss_started')
     except Exception as err:
         traceback.print_exc()
         print('Failed to start Self-service: ', str(err))
@@ -364,7 +364,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
 
 def install_build_dep():
     try:
-        if not exists('{}tmp/build_dep_ensured'.format(os.environ['ssn_dlab_path'])):
+        if not exists('{}tmp/build_dep_ensured'.format(os.environ['ssn_datalab_path'])):
             maven_version = '3.5.4'
             manage_pkg('-y install', 'remote', 'openjdk-8-jdk git wget unzip')
             with cd('/opt/'):
@@ -375,7 +375,7 @@ def install_build_dep():
             sudo('bash -c "curl --silent --location https://deb.nodesource.com/setup_12.x | bash -"')
             manage_pkg('-y install', 'remote', 'nodejs')
             sudo('npm config set unsafe-perm=true')
-            sudo('touch {}tmp/build_dep_ensured'.format(os.environ['ssn_dlab_path']))
+            sudo('touch {}tmp/build_dep_ensured'.format(os.environ['ssn_datalab_path']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to install build dependencies for UI: ', str(err))
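
The hunks above rename the billing CLI flags from --dlab_dir/--dlab_id to --datalab_dir/--datalab_id before the parameter string is handed to configure_billing.py. As a minimal sketch of how a consumer of the renamed flags could look -- only the two --datalab_* names come from the diff; the parser itself and the extra --mongo_password handling are illustrative, not the project's real script:

    import argparse

    def parse_billing_args(argv=None):
        # Hypothetical parser; flag names --datalab_dir/--datalab_id mirror the diff.
        parser = argparse.ArgumentParser(description='configure_billing sketch')
        parser.add_argument('--datalab_dir', required=True,
                            help='Data Lab installation directory (was --dlab_dir)')
        parser.add_argument('--datalab_id', required=True,
                            help='billing tag column for Data Lab resources (was --dlab_id)')
        parser.add_argument('--mongo_password', required=True)
        return parser.parse_args(argv)

    if __name__ == '__main__':
        args = parse_billing_args(['--datalab_dir', '/opt/datalab/',
                                   '--datalab_id', 'datalab_id',
                                   '--mongo_password', 'example'])
        print(args.datalab_dir, args.datalab_id)
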
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index 6758c1f..7bb1f47 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -27,12 +27,12 @@ import random
 import sys
 import string
 import json, uuid, time, datetime, csv
-from dlab.meta_lib import *
-from dlab.actions_lib import *
-import dlab.actions_lib
+from datalab.meta_lib import *
+from datalab.actions_lib import *
+import datalab.actions_lib
 import re
 import traceback
-from dlab.common_lib import *
+from datalab.common_lib import *
 
 
 def ensure_pip(requisites):
@@ -179,12 +179,12 @@ def append_result(error, exception=''):
     print(data)
 
 
-def put_resource_status(resource, status, dlab_path, os_user, hostname):
+def put_resource_status(resource, status, datalab_path, os_user, hostname):
     env['connection_attempts'] = 100
     keyfile = os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem"
     env.key_filename = [keyfile]
     env.host_string = os_user + '@' + hostname
-    sudo('python ' + dlab_path + 'tmp/resource_status.py --resource {} --status {}'.format(resource, status))
+    sudo('python ' + datalab_path + 'tmp/resource_status.py --resource {} --status {}'.format(resource, status))
 
 
 def configure_jupyter(os_user, jupyter_conf_file, templates_dir, jupyter_version, exploratory_name):
@@ -295,7 +295,7 @@ def ensure_jupyterlab_files(os_user, jupyterlab_dir, jupyterlab_image, jupyter_c
             sudo('echo \'c.NotebookApp.cookie_secret = b"{0}"\' >> {1}'.format(id_generator(), jupyter_conf_file))
             sudo('''echo "c.NotebookApp.token = u''" >> {}'''.format(jupyter_conf_file))
             sudo('echo \'c.KernelSpecManager.ensure_native_kernel = False\' >> {}'.format(jupyter_conf_file))
-            sudo('chown dlab-user:dlab-user /opt')
+            sudo('chown datalab-user:datalab-user /opt')
             sudo('echo -e "Host git.epam.com\n   HostName git.epam.com\n   ProxyCommand nc -X connect -x {}:3128 %h %p\n" > /home/{}/.ssh/config'.format(edge_ip, os_user))
             sudo('echo -e "Host github.com\n   HostName github.com\n   ProxyCommand nc -X connect -x {}:3128 %h %p" >> /home/{}/.ssh/config'.format(edge_ip, os_user))
 #            sudo('touch {}'.format(spark_script))
@@ -304,14 +304,14 @@ def ensure_jupyterlab_files(os_user, jupyterlab_dir, jupyterlab_image, jupyter_c
 #                'echo "PYJ=\`find /opt/spark/ -name \'*py4j*.zip\' | tr \'\\n\' \':\' | sed \'s|:$||g\'\`; sed -i \'s|PY4J|\'$PYJ\'|g\' /tmp/pyspark_local_template.json" >> {}'.format(
 #                spark_script))
 #            sudo(
-#                'echo "sed -i \'14s/:",/:\\/home\\/dlab-user\\/caffe\\/python:\\/home\\/dlab-user\\/pytorch\\/build:",/\' /tmp/pyspark_local_template.json" >> {}'.format(
+#                'echo "sed -i \'14s/:",/:\\/home\\/datalab-user\\/caffe\\/python:\\/home\\/datalab-user\\/pytorch\\/build:",/\' /tmp/pyspark_local_template.json" >> {}'.format(
 #                    spark_script))
 #            sudo('echo \'sed -i "s|SP_VER|{}|g" /tmp/pyspark_local_template.json\' >> {}'.format(os.environ['notebook_spark_version'], spark_script))
 #            sudo(
 #                'echo "PYJ=\`find /opt/spark/ -name \'*py4j*.zip\' | tr \'\\n\' \':\' | sed \'s|:$||g\'\`; sed -i \'s|PY4J|\'$PYJ\'|g\' /tmp/py3spark_local_template.json" >> {}'.format(
 #                spark_script))
 #            sudo(
-#                'echo "sed -i \'14s/:",/:\\/home\\/dlab-user\\/caffe\\/python:\\/home\\/dlab-user\\/pytorch\\/build:",/\' /tmp/py3spark_local_template.json" >> {}'.format(
+#                'echo "sed -i \'14s/:",/:\\/home\\/datalab-user\\/caffe\\/python:\\/home\\/datalab-user\\/pytorch\\/build:",/\' /tmp/py3spark_local_template.json" >> {}'.format(
 #                    spark_script))
 #            sudo('echo \'sed -i "s|SP_VER|{}|g" /tmp/py3spark_local_template.json\' >> {}'.format(os.environ['notebook_spark_version'], spark_script))
 #            sudo('echo "cp /tmp/pyspark_local_template.json /home/{}/.local/share/jupyter/kernels/pyspark_local/kernel.json" >> {}'.format(os_user, spark_script))
@@ -507,7 +507,7 @@ def update_spark_jars(jars_dir='/opt/jars'):
                     des_path = '/'.join(conf.split('/')[:3])
                     all_jars = find_des_jars(all_jars, des_path)
                 sudo('''sed -i '/^# Generated\|^spark.jars/d' {0}'''.format(conf))
-                sudo('echo "# Generated spark.jars by DLab from {0}\nspark.jars {1}" >> {2}'
+                sudo('echo "# Generated spark.jars by Data Lab from {0}\nspark.jars {1}" >> {2}'
                      .format(','.join(filter(None, [jars_dir, des_path])), ','.join(all_jars), conf))
                 # sudo("sed -i 's/^[[:space:]]*//' {0}".format(conf))
         else:
@@ -680,7 +680,7 @@ def set_git_proxy(os_user, hostname, keyfile, proxy_host):
 
 def set_mongo_parameters(client, mongo_parameters):
     for i in mongo_parameters:
-        client.dlabdb.settings.insert_one({"_id": i, "value": mongo_parameters[i]})
+        client.datalabdb.settings.insert_one({"_id": i, "value": mongo_parameters[i]})
 
 
 def install_r_packages(os_user):
@@ -950,25 +950,25 @@ def configure_superset(os_user, keycloak_auth_server_url, keycloak_realm_name, k
                 sudo('tar -xzf {}.tar.gz'.format(os.environ['notebook_superset_version']))
                 sudo('ln -sf incubator-superset-{} incubator-superset'.format(os.environ['notebook_superset_version']))
         if not exists('/tmp/superset-notebook_installed'):
-            sudo('mkdir -p /opt/dlab/templates')
-            put('/root/templates', '/opt/dlab', use_sudo=True)
-            sudo('sed -i \'s/OS_USER/{}/g\' /opt/dlab/templates/.env'.format(os_user))
+            sudo('mkdir -p /opt/datalab/templates')
+            put('/root/templates', '/opt/datalab', use_sudo=True)
+            sudo('sed -i \'s/OS_USER/{}/g\' /opt/datalab/templates/.env'.format(os_user))
             proxy_string = '{}:3128'.format(edge_instance_private_ip)
-            sudo('sed -i \'s|KEYCLOAK_AUTH_SERVER_URL|{}|g\' /opt/dlab/templates/id_provider.json'.format(keycloak_auth_server_url))
-            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/dlab/templates/id_provider.json'.format(keycloak_realm_name))
-            sudo('sed -i \'s/CLIENT_ID/{}/g\' /opt/dlab/templates/id_provider.json'.format(keycloak_client_id))
-            sudo('sed -i \'s/CLIENT_SECRET/{}/g\' /opt/dlab/templates/id_provider.json'.format(keycloak_client_secret))
-            sudo('sed -i \'s/PROXY_STRING/{}/g\' /opt/dlab/templates/docker-compose.yml'.format(proxy_string))
-            sudo('sed -i \'s|KEYCLOAK_AUTH_SERVER_URL|{}|g\' /opt/dlab/templates/superset_config.py'.format(keycloak_auth_server_url))
-            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/dlab/templates/superset_config.py'.format(keycloak_realm_name))
-            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/dlab/templates/superset_config.py'.format(edge_instance_public_ip))
-            sudo('sed -i \'s/SUPERSET_NAME/{}/g\' /opt/dlab/templates/superset_config.py'.format(superset_name))
-            sudo('cp -f /opt/dlab/templates/.env /home/{}/incubator-superset/contrib/docker/'.format(os_user))
-            sudo('cp -f /opt/dlab/templates/docker-compose.yml /home/{}/incubator-superset/contrib/docker/'.format(os_user))
-            sudo('cp -f /opt/dlab/templates/id_provider.json /home/{}/incubator-superset/contrib/docker/'.format(os_user))
-            sudo('cp -f /opt/dlab/templates/requirements-extra.txt /home/{}/incubator-superset/contrib/docker/'.format(os_user))
-            sudo('cp -f /opt/dlab/templates/superset_config.py /home/{}/incubator-superset/contrib/docker/'.format(os_user))
-            sudo('cp -f /opt/dlab/templates/docker-init.sh /home/{}/incubator-superset/contrib/docker/'.format(os_user))
+            sudo('sed -i \'s|KEYCLOAK_AUTH_SERVER_URL|{}|g\' /opt/datalab/templates/id_provider.json'.format(keycloak_auth_server_url))
+            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/id_provider.json'.format(keycloak_realm_name))
+            sudo('sed -i \'s/CLIENT_ID/{}/g\' /opt/datalab/templates/id_provider.json'.format(keycloak_client_id))
+            sudo('sed -i \'s/CLIENT_SECRET/{}/g\' /opt/datalab/templates/id_provider.json'.format(keycloak_client_secret))
+            sudo('sed -i \'s/PROXY_STRING/{}/g\' /opt/datalab/templates/docker-compose.yml'.format(proxy_string))
+            sudo('sed -i \'s|KEYCLOAK_AUTH_SERVER_URL|{}|g\' /opt/datalab/templates/superset_config.py'.format(keycloak_auth_server_url))
+            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/superset_config.py'.format(keycloak_realm_name))
+            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/superset_config.py'.format(edge_instance_public_ip))
+            sudo('sed -i \'s/SUPERSET_NAME/{}/g\' /opt/datalab/templates/superset_config.py'.format(superset_name))
+            sudo('cp -f /opt/datalab/templates/.env /home/{}/incubator-superset/contrib/docker/'.format(os_user))
+            sudo('cp -f /opt/datalab/templates/docker-compose.yml /home/{}/incubator-superset/contrib/docker/'.format(os_user))
+            sudo('cp -f /opt/datalab/templates/id_provider.json /home/{}/incubator-superset/contrib/docker/'.format(os_user))
+            sudo('cp -f /opt/datalab/templates/requirements-extra.txt /home/{}/incubator-superset/contrib/docker/'.format(os_user))
+            sudo('cp -f /opt/datalab/templates/superset_config.py /home/{}/incubator-superset/contrib/docker/'.format(os_user))
+            sudo('cp -f /opt/datalab/templates/docker-init.sh /home/{}/incubator-superset/contrib/docker/'.format(os_user))
             sudo('touch /tmp/superset-notebook_installed')
     except Exception as err:
         print("Failed configure superset: " + str(err))
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/edge_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/edge_lib.py
index 3ee832e..44a178a 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/edge_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/edge_lib.py
@@ -25,7 +25,7 @@ import os
 import sys
 from fabric.api import *
 from fabric.contrib.files import exists
-from dlab.common_lib import manage_pkg
+from datalab.common_lib import manage_pkg
 
 
 def configure_http_proxy_server(config):
@@ -85,7 +85,7 @@ def install_nginx_lua(edge_ip, nginx_version, keycloak_auth_server_url, keycloak
                      '--password-file /home/{2}/keys/provisioner_password {4} --output-file /tmp/step_token'.format(
                       os.environ['conf_stepcerts_kid'], os.environ['conf_stepcerts_ca_url'], user, cn, sans))
                 token = sudo('cat /tmp/step_token')
-                sudo('step ca certificate "{0}" /etc/ssl/certs/dlab.crt /etc/ssl/certs/dlab.key '
+                sudo('step ca certificate "{0}" /etc/ssl/certs/datalab.crt /etc/ssl/certs/datalab.key '
                      '--token "{1}" --kty=RSA --size 2048 --provisioner {2} '.format(cn, token,
                                                                                      os.environ['conf_stepcerts_kid']))
                 sudo('touch /var/log/renew_certificates.log')
@@ -93,8 +93,8 @@ def install_nginx_lua(edge_ip, nginx_version, keycloak_auth_server_url, keycloak
                 sudo('chmod +x /usr/local/bin/manage_step_certs.sh')
                 sudo('sed -i "s|STEP_ROOT_CERT_PATH|/etc/ssl/certs/root_ca.crt|g" '
                      '/usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_CERT_PATH|/etc/ssl/certs/dlab.crt|g" /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_KEY_PATH|/etc/ssl/certs/dlab.key|g" /usr/local/bin/manage_step_certs.sh')
+                sudo('sed -i "s|STEP_CERT_PATH|/etc/ssl/certs/datalab.crt|g" /usr/local/bin/manage_step_certs.sh')
+                sudo('sed -i "s|STEP_KEY_PATH|/etc/ssl/certs/datalab.key|g" /usr/local/bin/manage_step_certs.sh')
                 sudo('sed -i "s|STEP_CA_URL|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
                     os.environ['conf_stepcerts_ca_url']))
                 sudo('sed -i "s|RESOURCE_TYPE|edge|g" /usr/local/bin/manage_step_certs.sh')
@@ -112,9 +112,9 @@ def install_nginx_lua(edge_ip, nginx_version, keycloak_auth_server_url, keycloak
                 sudo('systemctl enable step-cert-manager.service')
             else:
                 if os.environ['conf_letsencrypt_enabled'] == 'true':
-                    print('Lets Encrypt certificates are not supported for redhat in dlab. Using self signed certificates')
-                sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/dlab.key \
-                     -out /etc/ssl/certs/dlab.crt -subj "/C=US/ST=US/L=US/O=dlab/CN={}"'.format(hostname))
+                    print('Lets Encrypt certificates are not supported for redhat in Data Lab. Using self-signed certificates')
+                sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key \
+                     -out /etc/ssl/certs/datalab.crt -subj "/C=US/ST=US/L=US/O=datalab/CN={}"'.format(hostname))
             sudo('mkdir -p /tmp/lua')
             sudo('mkdir -p /tmp/src')
             with cd('/tmp/src/'):
@@ -166,19 +166,19 @@ def install_nginx_lua(edge_ip, nginx_version, keycloak_auth_server_url, keycloak
 
             sudo('useradd -r nginx')
             sudo('rm -f /etc/nginx/nginx.conf')
-            sudo('mkdir -p /opt/dlab/templates')
-            put('/root/templates', '/opt/dlab', use_sudo=True)
-            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/dlab/templates/conf.d/proxy.conf'.format(edge_ip))
-            sudo('sed -i \'s|KEYCLOAK_AUTH_URL|{}|g\' /opt/dlab/templates/conf.d/proxy.conf'.format(keycloak_auth_server_url))
-            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/dlab/templates/conf.d/proxy.conf'.format(keycloak_realm_name))
-            sudo('sed -i \'s/KEYCLOAK_CLIENT_ID/{}/g\' /opt/dlab/templates/conf.d/proxy.conf'.format(keycloak_client_id))
-            sudo('sed -i \'s/KEYCLOAK_CLIENT_SECRET/{}/g\' /opt/dlab/templates/conf.d/proxy.conf'.format(keycloak_client_secret))
-
-            sudo('cp /opt/dlab/templates/nginx.conf /etc/nginx/')
+            sudo('mkdir -p /opt/datalab/templates')
+            put('/root/templates', '/opt/datalab', use_sudo=True)
+            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(edge_ip))
+            sudo('sed -i \'s|KEYCLOAK_AUTH_URL|{}|g\' /opt/datalab/templates/conf.d/proxy.conf'.format(keycloak_auth_server_url))
+            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(keycloak_realm_name))
+            sudo('sed -i \'s/KEYCLOAK_CLIENT_ID/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(keycloak_client_id))
+            sudo('sed -i \'s/KEYCLOAK_CLIENT_SECRET/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(keycloak_client_secret))
+
+            sudo('cp /opt/datalab/templates/nginx.conf /etc/nginx/')
             sudo('mkdir /etc/nginx/conf.d')
-            sudo('cp /opt/dlab/templates/conf.d/proxy.conf /etc/nginx/conf.d/')
+            sudo('cp /opt/datalab/templates/conf.d/proxy.conf /etc/nginx/conf.d/')
             sudo('mkdir /etc/nginx/locations')
-            sudo('cp /opt/dlab/templates/nginx_redhat /etc/init.d/nginx')
+            sudo('cp /opt/datalab/templates/nginx_redhat /etc/init.d/nginx')
             sudo('chmod +x /etc/init.d/nginx')
             sudo('chkconfig --add nginx')
             sudo('chkconfig --level 345 nginx on')
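
The edge configuration above repeatedly renders templates by sed-substituting placeholders in files copied to /opt/datalab/templates. A local Python sketch of the same substitution pattern; the placeholder names in the commented example come from the diff, while the helper itself is hypothetical:

    def render_template(path, substitutions):
        # Plain-Python equivalent of the sed -i 's/KEY/value/g' calls above.
        with open(path) as f:
            text = f.read()
        for placeholder, value in substitutions.items():
            text = text.replace(placeholder, value)
        with open(path, 'w') as f:
            f.write(text)

    # Example (values are made up):
    # render_template('/opt/datalab/templates/conf.d/proxy.conf',
    #                 {'EDGE_IP': '10.0.0.5', 'KEYCLOAK_REALM_NAME': 'datalab'})
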
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
index 1cee42e..5f6b7e7 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
@@ -28,10 +28,10 @@ import json
 import random
 import string
 import sys
-from dlab.notebook_lib import *
-from dlab.fab import *
+from datalab.notebook_lib import *
+from datalab.fab import *
 import os, time
-from dlab.common_lib import manage_pkg
+from datalab.common_lib import manage_pkg
 
 
 def enable_proxy(proxy_host, proxy_port):
@@ -112,7 +112,7 @@ def install_rstudio(os_user, local_spark_path, rstudio_pass, rstudio_version):
             manage_pkg('-y install --nogpgcheck', 'remote', 'https://download2.rstudio.org/server/centos6/x86_64/rstudio-server-rhel-{}-x86_64.rpm'.format(rstudio_version))
             sudo('mkdir -p /mnt/var')
             sudo('chown {0}:{0} /mnt/var'.format(os_user))
-            sudo("sed -i '/Type=forking/a \Environment=USER=dlab-user' /etc/systemd/system/rstudio-server.service")
+            sudo("sed -i '/Type=forking/a \Environment=USER=datalab-user' /etc/systemd/system/rstudio-server.service")
             sudo("sed -i '/ExecStart/s|=/usr/lib/rstudio-server/bin/rserver|=/bin/bash -c \"export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64; /usr/lib/rstudio-server/bin/rserver --auth-none 1|g' /etc/systemd/system/rstudio-server.service")
             sudo("sed -i '/ExecStart/s|$|\"|g' /etc/systemd/system/rstudio-server.service")
             sudo("systemctl daemon-reload")
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/ssn_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/ssn_lib.py
index a0022da..247978d 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/ssn_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/ssn_lib.py
@@ -24,18 +24,18 @@
 from fabric.api import *
 import crypt
 import yaml
-from dlab.fab import *
-from dlab.meta_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
 import os
 import json
 import sys
 import traceback
-from dlab.common_lib import manage_pkg
+from datalab.common_lib import manage_pkg
 
 
-def ensure_docker_daemon(dlab_path, os_user, region):
+def ensure_docker_daemon(datalab_path, os_user, region):
     try:
-        if not exists('{}tmp/docker_daemon_ensured'.format(dlab_path)):
+        if not exists('{}tmp/docker_daemon_ensured'.format(datalab_path)):
             docker_version = os.environ['ssn_docker_version']
             if region == 'cn-north-1':
                 mirror = 'mirror.lzu.edu.cn'
@@ -55,28 +55,28 @@ def ensure_docker_daemon(dlab_path, os_user, region):
             sudo('usermod -aG docker {}'.format(os_user))
             sudo('systemctl enable docker.service')
             sudo('systemctl start docker')
-            sudo('touch {}tmp/docker_daemon_ensured'.format(dlab_path))
+            sudo('touch {}tmp/docker_daemon_ensured'.format(datalab_path))
         return True
     except:
         return False
 
 
-def ensure_nginx(dlab_path):
+def ensure_nginx(datalab_path):
     try:
-        if not exists('{}tmp/nginx_ensured'.format(dlab_path)):
+        if not exists('{}tmp/nginx_ensured'.format(datalab_path)):
             manage_pkg('-y install', 'remote', 'nginx')
             sudo('systemctl restart nginx.service')
             sudo('chkconfig nginx on')
-            sudo('touch {}tmp/nginx_ensured'.format(dlab_path))
+            sudo('touch {}tmp/nginx_ensured'.format(datalab_path))
     except Exception as err:
         traceback.print_exc()
         print('Failed to ensure Nginx: ', str(err))
         sys.exit(1)
 
 
-def ensure_jenkins(dlab_path):
+def ensure_jenkins(datalab_path):
     try:
-        if not exists('{}tmp/jenkins_ensured'.format(dlab_path)):
+        if not exists('{}tmp/jenkins_ensured'.format(datalab_path)):
             sudo('wget -O /etc/yum.repos.d/jenkins.repo https://pkg.jenkins.io/redhat-stable/jenkins.repo')
             try:
                 sudo('rpm --import https://pkg.jenkins.io/redhat-stable/jenkins.io.key')
@@ -84,16 +84,16 @@ def ensure_jenkins(dlab_path):
                 pass
             manage_pkg('-y install', 'remote', 'jenkins')
             manage_pkg('-y install', 'remote', 'policycoreutils-python')
-            sudo('touch {}tmp/jenkins_ensured'.format(dlab_path))
+            sudo('touch {}tmp/jenkins_ensured'.format(datalab_path))
     except Exception as err:
         traceback.print_exc()
         print('Failed to ensure Jenkins: ', str(err))
         sys.exit(1)
 
 
-def configure_jenkins(dlab_path, os_user, config, tag_resource_id):
+def configure_jenkins(datalab_path, os_user, config, tag_resource_id):
     try:
-        if not exists('{}tmp/jenkins_configured'.format(dlab_path)):
+        if not exists('{}tmp/jenkins_configured'.format(datalab_path)):
             sudo('rm -rf /var/lib/jenkins/*')
             sudo('mkdir -p /var/lib/jenkins/jobs/')
             sudo('chown -R {0}:{0} /var/lib/jenkins/'.format(os_user))
@@ -109,14 +109,14 @@ def configure_jenkins(dlab_path, os_user, config, tag_resource_id):
             sudo('chkconfig jenkins on')
             sudo('systemctl start jenkins.service')
             sudo('echo "jenkins ALL = NOPASSWD:ALL" >> /etc/sudoers')
-            sudo('touch {}tmp/jenkins_configured'.format(dlab_path))
+            sudo('touch {}tmp/jenkins_configured'.format(datalab_path))
     except Exception as err:
         traceback.print_exc()
         print('Failed to configure Jenkins: ', str(err))
         sys.exit(1)
 
 
-def configure_nginx(config, dlab_path, hostname):
+def configure_nginx(config, datalab_path, hostname):
     try:
         random_file_part = id_generator(size=20)
         if not exists("/etc/nginx/conf.d/nginx_proxy.conf"):
@@ -125,8 +125,8 @@ def configure_nginx(config, dlab_path, hostname):
             put(config['nginx_template_dir'] + 'ssn_nginx.conf', '/tmp/nginx.conf')
             sudo("sed -i 's|SSN_HOSTNAME|" + hostname + "|' /tmp/nginx_proxy.conf")
             sudo('cat /tmp/nginx.conf > /etc/nginx/nginx.conf')
-            sudo('mv /tmp/nginx_proxy.conf ' + dlab_path + 'tmp/')
-            sudo('\cp ' + dlab_path + 'tmp/nginx_proxy.conf /etc/nginx/conf.d/')
+            sudo('mv /tmp/nginx_proxy.conf ' + datalab_path + 'tmp/')
+            sudo('\cp ' + datalab_path + 'tmp/nginx_proxy.conf /etc/nginx/conf.d/')
             sudo('mkdir -p /etc/nginx/locations')
             sudo('rm -f /etc/nginx/sites-enabled/default')
     except Exception as err:
@@ -160,12 +160,12 @@ def configure_nginx(config, dlab_path, hostname):
 
 def ensure_supervisor():
     try:
-        if not exists('{}tmp/superv_ensured'.format(os.environ['ssn_dlab_path'])):
+        if not exists('{}tmp/superv_ensured'.format(os.environ['ssn_datalab_path'])):
             manage_pkg('-y install', 'remote', 'supervisor')
             #sudo('pip install supervisor')
             sudo('chkconfig supervisord on')
             sudo('systemctl start supervisord')
-            sudo('touch {}tmp/superv_ensured'.format(os.environ['ssn_dlab_path']))
+            sudo('touch {}tmp/superv_ensured'.format(os.environ['ssn_datalab_path']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to install supervisor: ', str(err))
@@ -174,7 +174,7 @@ def ensure_supervisor():
 
 def ensure_mongo():
     try:
-        if not exists('{}tmp/mongo_ensured'.format(os.environ['ssn_dlab_path'])):
+        if not exists('{}tmp/mongo_ensured'.format(os.environ['ssn_datalab_path'])):
             sudo('echo -e "[mongodb-org-3.2]\nname=MongoDB Repository'
                  '\nbaseurl=https://repo.mongodb.org/yum/redhat/7/mongodb-org/3.2/x86_64/'
                  '\ngpgcheck=1'
@@ -187,25 +187,25 @@ def ensure_mongo():
             sudo('echo "d /var/run/mongodb 0755 mongod mongod" > /lib/tmpfiles.d/mongodb.conf')
             sudo('sudo systemd-tmpfiles --create mongodb.conf')
             sudo('systemctl start mongod.service')
-            sudo('touch {}tmp/mongo_ensured'.format(os.environ['ssn_dlab_path']))
+            sudo('touch {}tmp/mongo_ensured'.format(os.environ['ssn_datalab_path']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to install MongoDB: ', str(err))
         sys.exit(1)
 
 
-def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
+def start_ss(keyfile, host_string, datalab_conf_dir, web_path,
              os_user, mongo_passwd, keystore_passwd, cloud_provider,
              service_base_name, tag_resource_id, billing_tag, account_id, billing_bucket,
-             aws_job_enabled, dlab_path, billing_enabled, cloud_params,
+             aws_job_enabled, datalab_path, billing_enabled, cloud_params,
              authentication_file, offer_number, currency,
              locale, region_info, ldap_login, tenant_id,
              application_id, hostname, data_lake_name, subscription_id,
-             validate_permission_scope, dlab_id, usage_date, product,
+             validate_permission_scope, datalab_id, usage_date, product,
              usage_type, usage, cost, resource_id, tags, billing_dataset_name, keycloak_client_id,
              keycloak_client_secret, keycloak_auth_server_url, report_path=''):
     try:
-        if not exists('{}tmp/ss_started'.format(os.environ['ssn_dlab_path'])):
+        if not exists('{}tmp/ss_started'.format(os.environ['ssn_datalab_path'])):
             java_path = sudo("alternatives --display java | grep 'slave jre: ' | awk '{print $3}'")
             supervisor_conf = '/etc/supervisord.d/supervisor_svc.ini'
             local('sed -i "s|MONGO_PASSWORD|{}|g" /root/templates/ssn.yml'.format(mongo_passwd))
@@ -213,16 +213,16 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
             local('sed -i "s|CLOUD_PROVIDER|{}|g" /root/templates/ssn.yml'.format(cloud_provider))
             local('sed -i "s|\${JRE_HOME}|' + java_path + '|g" /root/templates/ssn.yml')
             sudo('sed -i "s|KEYNAME|{}|g" {}/webapp/provisioning-service/conf/provisioning.yml'.
-                  format(os.environ['conf_key_name'], dlab_path))
+                  format(os.environ['conf_key_name'], datalab_path))
             put('/root/templates/ssn.yml', '/tmp/ssn.yml')
-            sudo('mv /tmp/ssn.yml ' + os.environ['ssn_dlab_path'] + 'conf/')
+            sudo('mv /tmp/ssn.yml ' + os.environ['ssn_datalab_path'] + 'conf/')
             put('/root/templates/proxy_location_webapp_template.conf', '/tmp/proxy_location_webapp_template.conf')
-            sudo('mv /tmp/proxy_location_webapp_template.conf ' + os.environ['ssn_dlab_path'] + 'tmp/')
+            sudo('mv /tmp/proxy_location_webapp_template.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
             if cloud_provider == 'gcp':
                 conf_parameter_name = '--spring.config.location='
                 with open('/root/templates/supervisor_svc.conf', 'r') as f:
                     text = f.read()
-                text = text.replace('WEB_CONF', dlab_conf_dir).replace('OS_USR', os_user)\
+                text = text.replace('WEB_CONF', datalab_conf_dir).replace('OS_USR', os_user)\
                     .replace('CONF_PARAMETER_NAME', conf_parameter_name)
                 with open('/root/templates/supervisor_svc.conf', 'w') as f:
                     f.write(text)
@@ -230,15 +230,15 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                 conf_parameter_name = '--conf '
                 with open('/root/templates/supervisor_svc.conf', 'r') as f:
                     text = f.read()
-                text = text.replace('WEB_CONF', dlab_conf_dir).replace('OS_USR', os_user)\
+                text = text.replace('WEB_CONF', datalab_conf_dir).replace('OS_USR', os_user)\
                     .replace('CONF_PARAMETER_NAME', conf_parameter_name)
                 with open('/root/templates/supervisor_svc.conf', 'w') as f:
                     f.write(text)
             put('/root/templates/supervisor_svc.conf', '/tmp/supervisor_svc.conf')
-            sudo('mv /tmp/supervisor_svc.conf ' + os.environ['ssn_dlab_path'] + 'tmp/')
-            sudo('cp ' + os.environ['ssn_dlab_path'] +
+            sudo('mv /tmp/supervisor_svc.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
+            sudo('cp ' + os.environ['ssn_datalab_path'] +
                  'tmp/proxy_location_webapp_template.conf /etc/nginx/locations/proxy_location_webapp.conf')
-            sudo('cp ' + os.environ['ssn_dlab_path'] + 'tmp/supervisor_svc.conf {}'.format(supervisor_conf))
+            sudo('cp ' + os.environ['ssn_datalab_path'] + 'tmp/supervisor_svc.conf {}'.format(supervisor_conf))
             sudo('sed -i \'s=WEB_APP_DIR={}=\' {}'.format(web_path, supervisor_conf))
             try:
                 sudo('mkdir -p /var/log/application')
@@ -246,7 +246,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                 for service in ['self-service', 'provisioning-service', 'billing']:
                     jar = sudo('cd {0}{1}/lib/; find {1}*.jar -type f'.format(web_path, service))
                     sudo('ln -s {0}{2}/lib/{1} {0}{2}/{2}.jar '.format(web_path, jar, service))
-                    sudo('cp {0}/webapp/{1}/conf/*.yml /tmp/yml_tmp/'.format(dlab_path, service))
+                    sudo('cp {0}/webapp/{1}/conf/*.yml /tmp/yml_tmp/'.format(datalab_path, service))
                 # Replacing Keycloak and cloud parameters
                 for item in json.loads(cloud_params):
                     if "KEYCLOAK_" in item['key']:
@@ -262,7 +262,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                     sudo('sed -i "s|<LOGIN_USE_LDAP>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(ldap_login))
                     sudo('sed -i "s|<LOGIN_TENANT_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(tenant_id))
                     sudo('sed -i "s|<LOGIN_APPLICATION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(application_id))
-                    sudo('sed -i "s|<DLAB_SUBSCRIPTION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(subscription_id))
+                    sudo('sed -i "s|<DATA_LAB_SUBSCRIPTION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(subscription_id))
                     sudo('sed -i "s|<MANAGEMENT_API_AUTH_FILE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(authentication_file))
                     sudo('sed -i "s|<VALIDATE_PERMISSION_SCOPE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(validate_permission_scope))
                     sudo('sed -i "s|<LOGIN_APPLICATION_REDIRECT_URL>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(hostname))
@@ -275,7 +275,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                     #     permission_scope = 'subscriptions/{}/resourceGroups/{}/providers/Microsoft.Authorization/'.format(
                     #         subscription_id, service_base_name
                     #     )
-                sudo('mv /tmp/yml_tmp/* ' + os.environ['ssn_dlab_path'] + 'conf/')
+                sudo('mv /tmp/yml_tmp/* ' + os.environ['ssn_datalab_path'] + 'conf/')
                 sudo('rmdir /tmp/yml_tmp/')
             except Exception as err:
                 traceback.print_exc()
@@ -294,13 +294,13 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                          '--aws_job_enabled {} ' \
                          '--report_path "{}" ' \
                          '--mongo_password {} ' \
-                         '--dlab_dir {} ' \
+                         '--datalab_dir {} ' \
                          '--authentication_file "{}" ' \
                          '--offer_number {} ' \
                          '--currency {} ' \
                          '--locale {} ' \
                          '--region_info {} ' \
-                         '--dlab_id {} ' \
+                         '--datalab_id {} ' \
                          '--usage_date {} ' \
                          '--product {} ' \
                          '--usage_type {} ' \
@@ -321,13 +321,13 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                                    aws_job_enabled,
                                    report_path,
                                    mongo_passwd,
-                                   dlab_path,
+                                   datalab_path,
                                    authentication_file,
                                    offer_number,
                                    currency,
                                    locale,
                                    region_info,
-                                   dlab_id,
+                                   datalab_id,
                                    usage_date,
                                    product,
                                    usage_type,
@@ -343,7 +343,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
 
             try:
                 if os.environ['conf_stepcerts_enabled'] == 'true':
-                    sudo('openssl pkcs12 -export -in /etc/ssl/certs/dlab.crt -inkey /etc/ssl/certs/dlab.key -name ssn '
+                    sudo('openssl pkcs12 -export -in /etc/ssl/certs/datalab.crt -inkey /etc/ssl/certs/datalab.key -name ssn '
                          '-out ssn.p12 -password pass:{0}'.format(keystore_passwd))
                     sudo('keytool -importkeystore -srckeystore ssn.p12 -srcstoretype PKCS12 -alias ssn -destkeystore '
                          '/home/{0}/keys/ssn.keystore.jks -deststorepass "{1}" -srcstorepass "{1}"'.format(
@@ -353,24 +353,24 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
                         os_user, keystore_passwd))
                     sudo('keytool -importcert -trustcacerts -alias step-ca -file /etc/ssl/certs/root_ca.crt '
                          '-noprompt -storepass changeit -keystore {1}/lib/security/cacerts'.format(os_user, java_path))
-                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/dlab.crt -noprompt '
+                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt '
                          '-storepass changeit -keystore {0}/lib/security/cacerts'.format(java_path))
                 else:
                     if os.environ['conf_letsencrypt_enabled'] == 'true':
-                        print('Lets Encrypt certificates are not supported for redhat in dlab. Using self signed certificates')
+                        print('Lets Encrypt certificates are not supported for redhat in Data Lab. Using self-signed certificates')
                     sudo('keytool -genkeypair -alias ssn -keyalg RSA -validity 730 -storepass {1} -keypass {1} \
                          -keystore /home/{0}/keys/ssn.keystore.jks -keysize 2048 -dname "CN=localhost"'.format(
                         os_user, keystore_passwd))
-                    sudo('keytool -exportcert -alias ssn -storepass {1} -file /etc/ssl/certs/dlab.crt \
+                    sudo('keytool -exportcert -alias ssn -storepass {1} -file /etc/ssl/certs/datalab.crt \
                          -keystore /home/{0}/keys/ssn.keystore.jks'.format(os_user, keystore_passwd))
-                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/dlab.crt -noprompt \
+                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt \
                          -storepass changeit -keystore {1}/lib/security/cacerts'.format(os_user, java_path))
             except:
                 append_result("Unable to generate cert and copy to java keystore")
                 sys.exit(1)
             sudo('systemctl restart supervisord')
             sudo('service nginx restart')
-            sudo('touch ' + os.environ['ssn_dlab_path'] + 'tmp/ss_started')
+            sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/ss_started')
     except Exception as err:
         traceback.print_exc()
         print('Failed to start Self-service: ', str(err))
@@ -379,7 +379,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
 
 def install_build_dep():
     try:
-        if not exists('{}tmp/build_dep_ensured'.format(os.environ['ssn_dlab_path'])):
+        if not exists('{}tmp/build_dep_ensured'.format(os.environ['ssn_datalab_path'])):
             maven_version = '3.5.4'
             manage_pkg('-y install', 'remote', 'java-1.8.0-openjdk java-1.8.0-openjdk-devel git wget unzip')
             with cd('/opt/'):
@@ -390,7 +390,7 @@ def install_build_dep():
             sudo('bash -c "curl --silent --location https://rpm.nodesource.com/setup_12.x | bash -"')
             manage_pkg('-y install', 'remote', 'nodejs')
             sudo('npm config set unsafe-perm=true')
-            sudo('touch {}tmp/build_dep_ensured'.format(os.environ['ssn_dlab_path']))
+            sudo('touch {}tmp/build_dep_ensured'.format(os.environ['ssn_datalab_path']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to install build dependencies for UI: ', str(err))
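
Every ensure_*/configure_* function above guards its work with a marker file under the renamed ssn_datalab_path (tmp/nginx_ensured, tmp/mongo_ensured, tmp/ss_started, ...), so re-runs are no-ops. A minimal local sketch of that idempotency pattern; the helper name and paths are illustrative:

    import os

    def ensure_once(marker, action):
        """Run action() once; a marker file makes re-runs no-ops."""
        if os.path.exists(marker):
            return False            # step already completed on an earlier run
        action()                    # e.g. install and enable a package
        open(marker, 'w').close()   # local stand-in for sudo('touch <marker>')
        return True

    # ensure_once('/opt/datalab/tmp/nginx_ensured', lambda: print('install nginx'))
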
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_collect_data.py b/infrastructure-provisioning/src/general/scripts/aws/common_collect_data.py
index e63a63f..fe62e8c 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_collect_data.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_collect_data.py
@@ -25,9 +25,9 @@ import argparse
 import json
 import datetime
 from fabric.api import *
-from dlab.actions_lib import *
-from dlab.meta_lib import *
-from dlab.fab import *
+from datalab.actions_lib import *
+from datalab.meta_lib import *
+from datalab.fab import *
 import traceback
 import sys
 import ast
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_create_bucket.py b/infrastructure-provisioning/src/general/scripts/aws/common_create_bucket.py
index 207af06..6a09b96 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_create_bucket.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_create_bucket.py
@@ -23,8 +23,8 @@
 
 import argparse
 import json
-from dlab.actions_lib import *
-from dlab.meta_lib import *
+from datalab.actions_lib import *
+from datalab.meta_lib import *
 import sys
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_create_instance.py b/infrastructure-provisioning/src/general/scripts/aws/common_create_instance.py
index ecf46be..a97f228 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_create_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_create_instance.py
@@ -23,8 +23,8 @@
 
 import argparse
 import json
-from dlab.actions_lib import *
-from dlab.meta_lib import *
+from datalab.actions_lib import *
+from datalab.meta_lib import *
 import sys
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_create_notebook_image.py b/infrastructure-provisioning/src/general/scripts/aws/common_create_notebook_image.py
index 1d5cb04..39481bc 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_create_notebook_image.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_create_notebook_image.py
@@ -21,9 +21,9 @@
 #
 # ******************************************************************************
 
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import sys
 import json
 import uuid
@@ -33,8 +33,8 @@ import os
 if __name__ == "__main__":
     try:
         image_conf = dict()
-        dlab.actions_lib.create_aws_config_files()
-        image_conf['service_base_name'] = os.environ['conf_service_base_name'] = dlab.fab.replace_multi_symbols(
+        datalab.actions_lib.create_aws_config_files()
+        image_conf['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
             os.environ['conf_service_base_name'][:20], '-', True)
         image_conf['project_name'] = os.environ['project_name']
         image_conf['project_tag'] = os.environ['project_name']
@@ -55,7 +55,7 @@ if __name__ == "__main__":
                               "FIN": image_conf['full_image_name'],
                               os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
 
-        ami_id = dlab.meta_lib.get_ami_id_by_name(image_conf['full_image_name'])
+        ami_id = datalab.meta_lib.get_ami_id_by_name(image_conf['full_image_name'])
         if ami_id == '':
             try:
                 os.environ['conf_additional_tags'] = os.environ['conf_additional_tags'] + \
@@ -64,7 +64,7 @@ if __name__ == "__main__":
             except KeyError:
                 os.environ['conf_additional_tags'] = 'project_tag:{0};endpoint_tag:{1}'.format(
                     os.environ['project_name'], os.environ['endpoint_name'])
-            image_id = dlab.actions_lib.create_image_from_instance(tag_name=image_conf['instance_tag'],
+            image_id = datalab.actions_lib.create_image_from_instance(tag_name=image_conf['instance_tag'],
                                                                    instance_name=image_conf['instance_name'],
                                                                    image_name=image_conf['full_image_name'],
                                                                    tags=json.dumps(image_conf['tags']))
@@ -79,5 +79,5 @@ if __name__ == "__main__":
                        "Action": "Create image from notebook"}
                 result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Failed to create image from notebook", str(err))
+        datalab.fab.append_result("Failed to create image from notebook", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py b/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py
index 8b7f038..1bef0c6 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py
@@ -22,8 +22,8 @@
 # ******************************************************************************
 
 import argparse
-from dlab.actions_lib import *
-from dlab.meta_lib import *
+from datalab.actions_lib import *
+from datalab.meta_lib import *
 import sys
 import boto3, botocore
 
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_create_role_policy.py b/infrastructure-provisioning/src/general/scripts/aws/common_create_role_policy.py
index 1f914c1..4763bd3 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_create_role_policy.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_create_role_policy.py
@@ -22,8 +22,8 @@
 # ******************************************************************************
 
 import argparse
-from dlab.actions_lib import *
-from dlab.meta_lib import *
+from datalab.actions_lib import *
+from datalab.meta_lib import *
 import sys
 
 parser = argparse.ArgumentParser()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_create_security_group.py b/infrastructure-provisioning/src/general/scripts/aws/common_create_security_group.py
index 4ee0575..3fd252f 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_create_security_group.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_create_security_group.py
@@ -23,8 +23,8 @@
 
 import json
 import argparse
-from dlab.actions_lib import *
-from dlab.meta_lib import *
+from datalab.actions_lib import *
+from datalab.meta_lib import *
 import sys
 from botocore.exceptions import ClientError
 
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_create_subnet.py b/infrastructure-provisioning/src/general/scripts/aws/common_create_subnet.py
index 83dd48a..d5b560b 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_create_subnet.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_create_subnet.py
@@ -24,8 +24,8 @@
 import argparse
 import json
 from botocore import exceptions
-from dlab.actions_lib import *
-from dlab.meta_lib import *
+from datalab.actions_lib import *
+from datalab.meta_lib import *
 import sys
 import boto3
 import ipaddress
@@ -77,18 +77,18 @@ if __name__ == "__main__":
                 else:
                     break
 
-            dlab_subnet_cidr = ''
+            datalab_subnet_cidr = ''
             if previous_subnet_size < private_subnet_size:
                 while True:
                     try:
-                        dlab_subnet_cidr = '{0}/{1}'.format(ipaddress.ip_address(last_ip + 1), args.prefix)
-                        ipaddress.ip_network(dlab_subnet_cidr.decode('utf-8'))
+                        datalab_subnet_cidr = '{0}/{1}'.format(ipaddress.ip_address(last_ip + 1), args.prefix)
+                        ipaddress.ip_network(datalab_subnet_cidr.decode('utf-8'))
                         break
                     except ValueError:
                         last_ip = last_ip + 2
                         continue
             else:
-                dlab_subnet_cidr = '{0}/{1}'.format(ipaddress.ip_address(last_ip + 1), args.prefix)
+                datalab_subnet_cidr = '{0}/{1}'.format(ipaddress.ip_address(last_ip + 1), args.prefix)
         else:
             pre_defined_subnet_list = []
             subnet_cidr = args.user_subnets_range.split('-')[0].replace(' ', '')
@@ -111,18 +111,18 @@ if __name__ == "__main__":
                 print("There is no available subnet to create. Aborting...")
                 sys.exit(1)
             else:
-                dlab_subnet_cidr = available_subnets[0]
+                datalab_subnet_cidr = available_subnets[0]
         if args.ssn:
-            subnet_id = get_subnet_by_cidr(dlab_subnet_cidr, args.vpc_id)
+            subnet_id = get_subnet_by_cidr(datalab_subnet_cidr, args.vpc_id)
             subnet_check = get_subnet_by_tag(tag, False, args.vpc_id)
         else:
-            subnet_id = get_subnet_by_cidr(dlab_subnet_cidr, args.vpc_id)
+            subnet_id = get_subnet_by_cidr(datalab_subnet_cidr, args.vpc_id)
             subnet_check = get_subnet_by_tag(tag, args.vpc_id)
         if not subnet_check:
             if subnet_id == '':
                 print("Creating subnet {0} in vpc {1} with tag {2}".
-                      format(dlab_subnet_cidr, args.vpc_id, json.dumps(tag)))
-                subnet_id = create_subnet(args.vpc_id, dlab_subnet_cidr, tag, args.zone)
+                      format(datalab_subnet_cidr, args.vpc_id, json.dumps(tag)))
+                subnet_id = create_subnet(args.vpc_id, datalab_subnet_cidr, tag, args.zone)
                 create_tag(subnet_id, tag_name)
         else:
             print("REQUESTED SUBNET ALREADY EXISTS. USING CIDR {}".format(subnet_check))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
index 8051e6d..ae9f28d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import argparse
-from dlab.actions_lib import *
+from datalab.actions_lib import *
 from fabric.api import *
 import os
 
@@ -49,7 +49,7 @@ if __name__ == "__main__":
     bucket_name = ('{0}-{1}-{2}-bucket'.format(service_base_name,
                                                project_name, endpoint_name)).lower().replace('_', '-')
     gitlab_certfile = os.environ['conf_gitlab_certfile']
-    if dlab.actions_lib.get_gitlab_cert(bucket_name, gitlab_certfile):
+    if datalab.actions_lib.get_gitlab_cert(bucket_name, gitlab_certfile):
         put(gitlab_certfile, gitlab_certfile)
         sudo('chown root:root {}'.format(gitlab_certfile))
         print('{} has been downloaded'.format(gitlab_certfile))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py
index 1d0df4f..decc617 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py
@@ -24,9 +24,9 @@
 import logging
 import json
 import sys
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import traceback
 import os
 import uuid
@@ -34,9 +34,9 @@ from fabric.api import *
 
 
 def clear_resources():
-    emr_id = dlab.meta_lib.get_emr_id_by_name(notebook_config['cluster_name'])
-    dlab.actions_lib.terminate_emr(emr_id)
-    dlab.actions_lib. remove_kernels(notebook_config['cluster_name'], notebook_config['tag_name'],
+    emr_id = datalab.meta_lib.get_emr_id_by_name(notebook_config['cluster_name'])
+    datalab.actions_lib.terminate_emr(emr_id)
+    datalab.actions_lib.remove_kernels(notebook_config['cluster_name'], notebook_config['tag_name'],
                                      os.environ['notebook_instance_name'], os.environ['conf_os_user'],
                                      notebook_config['key_path'], os.environ['emr_version'])
 
@@ -50,10 +50,10 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
     try:
         # generating variables dictionary
-        dlab.actions_lib.create_aws_config_files()
+        datalab.actions_lib.create_aws_config_files()
         print('Generating infrastructure names and tags')
         notebook_config = dict()
-        notebook_config['service_base_name'] = os.environ['conf_service_base_name'] = dlab.fab.replace_multi_symbols(
+        notebook_config['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
                 os.environ['conf_service_base_name'][:20], '-', True)
         notebook_config['notebook_name'] = os.environ['notebook_instance_name']
         notebook_config['tag_name'] = notebook_config['service_base_name'] + '-tag'
@@ -63,22 +63,22 @@ if __name__ == "__main__":
                                                                       notebook_config['project_name'],
                                                                       notebook_config['endpoint_name']
                                                                      ).lower().replace('_', '-')
-        notebook_config['cluster_name'] = dlab.meta_lib.get_not_configured_emr(notebook_config['tag_name'],
+        notebook_config['cluster_name'] = datalab.meta_lib.get_not_configured_emr(notebook_config['tag_name'],
                                                                                notebook_config['notebook_name'], True)
-        notebook_config['notebook_ip'] = dlab.meta_lib.get_instance_ip_address(
+        notebook_config['notebook_ip'] = datalab.meta_lib.get_instance_ip_address(
             notebook_config['tag_name'], notebook_config['notebook_name']).get('Private')
         notebook_config['key_path'] = os.environ['conf_key_dir'] + '/' + os.environ['conf_key_name'] + '.pem'
-        notebook_config['cluster_id'] = dlab.meta_lib.get_emr_id_by_name(notebook_config['cluster_name'])
+        notebook_config['cluster_id'] = datalab.meta_lib.get_emr_id_by_name(notebook_config['cluster_name'])
         edge_instance_name = '{}-{}-{}-edge'.format(notebook_config['service_base_name'],
                                                     os.environ['project_name'], os.environ['endpoint_name'])
-        edge_instance_hostname = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'], edge_instance_name)
+        edge_instance_hostname = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'], edge_instance_name)
         if os.environ['application'] == 'deeplearning':
             application = 'jupyter'
         else:
             application = os.environ['application']
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
 
     try:
@@ -94,14 +94,14 @@ if __name__ == "__main__":
                     os.environ['application'], os.environ['conf_pypi_mirror'])
         try:
             local("~/scripts/{}_{}.py {}".format(application, 'install_dataengine-service_kernels', params))
-            dlab.actions_lib.remove_emr_tag(notebook_config['cluster_id'], ['State'])
-            dlab.actions_lib.tag_emr_volume(notebook_config['cluster_id'], notebook_config['cluster_name'],
+            datalab.actions_lib.remove_emr_tag(notebook_config['cluster_id'], ['State'])
+            datalab.actions_lib.tag_emr_volume(notebook_config['cluster_id'], notebook_config['cluster_name'],
                                             os.environ['conf_tag_resource_id'])
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing EMR kernels.", str(err))
+        datalab.fab.append_result("Failed installing EMR kernels.", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -116,14 +116,14 @@ if __name__ == "__main__":
                     os.environ['conf_os_user'])
         try:
             local("~/scripts/{0}.py {1}".format('common_configure_spark', params))
-            dlab.actions_lib.remove_emr_tag(notebook_config['cluster_id'], ['State'])
-            dlab.actions_lib.tag_emr_volume(notebook_config['cluster_id'], notebook_config['cluster_name'],
+            datalab.actions_lib.remove_emr_tag(notebook_config['cluster_id'], ['State'])
+            datalab.actions_lib.tag_emr_volume(notebook_config['cluster_id'], notebook_config['cluster_name'],
                                             os.environ['conf_tag_resource_id'])
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure Spark.", str(err))
+        datalab.fab.append_result("Failed to configure Spark.", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -135,6 +135,6 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         clear_resources()
         sys.exit(1)
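
Note: as in the other configure scripts above, every failure path funnels through
clear_resources() before exiting, so a half-configured cluster never outlives the
script. A compact sketch of that rollback-on-error shape (the step bodies are
placeholders, not the project's code):

    import sys
    import traceback

    def clear_resources():
        # Stand-in: would terminate the EMR cluster and remove its kernels.
        print('rolling back partially configured resources')

    def run_step(label, step):
        # Run one provisioning step; on any failure, roll back and stop.
        try:
            print('[{}]'.format(label))
            step()
        except Exception:
            traceback.print_exc()
            clear_resources()
            sys.exit(1)

    run_step('INSTALLING KERNELS', lambda: None)
    run_step('CONFIGURING SPARK', lambda: None)
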
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
index c80328b..7ea51e3 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
@@ -24,9 +24,9 @@
 import logging
 import json
 import sys
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import traceback
 import os
 import uuid
@@ -34,10 +34,10 @@ from fabric.api import *
 
 
 def clear_resources():
-    dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['master_node_name'])
+    datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['master_node_name'])
     for i in range(notebook_config['instance_count'] - 1):
         slave_name = notebook_config['slave_node_name'] + '{}'.format(i + 1)
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], slave_name)
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], slave_name)
 
 
 if __name__ == "__main__":
@@ -50,7 +50,7 @@ if __name__ == "__main__":
 
     try:
         # generating variables dictionary
-        dlab.actions_lib.create_aws_config_files()
+        datalab.actions_lib.create_aws_config_files()
         print('Generating infrastructure names and tags')
         notebook_config = dict()
         if 'exploratory_name' in os.environ:
@@ -61,7 +61,7 @@ if __name__ == "__main__":
             notebook_config['computational_name'] = os.environ['computational_name']
         else:
             notebook_config['computational_name'] = ''
-        notebook_config['service_base_name'] = os.environ['conf_service_base_name'] = dlab.fab.replace_multi_symbols(
+        notebook_config['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
             os.environ['conf_service_base_name'][:20], '-', True)
         notebook_config['region'] = os.environ['aws_region']
         notebook_config['tag_name'] = notebook_config['service_base_name'] + '-tag'
@@ -75,21 +75,21 @@ if __name__ == "__main__":
         notebook_config['slave_node_name'] = notebook_config['cluster_name'] + '-s'
         notebook_config['notebook_name'] = os.environ['notebook_instance_name']
         notebook_config['key_path'] = os.environ['conf_key_dir'] + '/' + os.environ['conf_key_name'] + '.pem'
-        notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
+        notebook_config['datalab_ssh_user'] = os.environ['conf_os_user']
         notebook_config['instance_count'] = int(os.environ['dataengine_instance_count'])
         try:
-            notebook_config['spark_master_ip'] = dlab.meta_lib.get_instance_private_ip_address(
+            notebook_config['spark_master_ip'] = datalab.meta_lib.get_instance_private_ip_address(
                 notebook_config['tag_name'], notebook_config['master_node_name'])
-            notebook_config['notebook_ip'] = dlab.meta_lib.get_instance_private_ip_address(
+            notebook_config['notebook_ip'] = datalab.meta_lib.get_instance_private_ip_address(
                 notebook_config['tag_name'], notebook_config['notebook_name'])
         except Exception as err:
-            dlab.fab.append_result("Failed to get ip address", str(err))
+            datalab.fab.append_result("Failed to get ip address", str(err))
             sys.exit(1)
         notebook_config['spark_master_url'] = 'spark://{}:7077'.format(notebook_config['spark_master_ip'])
 
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to generate infrastructure names", str(err))
+        datalab.fab.append_result("Failed to generate infrastructure names", str(err))
         sys.exit(1)
 
     try:
@@ -98,7 +98,7 @@ if __name__ == "__main__":
         params = "--cluster_name {0} --spark_version {1} --hadoop_version {2} --os_user {3} --spark_master {4}" \
                  " --keyfile {5} --notebook_ip {6} --spark_master_ip {7}".\
             format(notebook_config['cluster_name'], os.environ['notebook_spark_version'],
-                   os.environ['notebook_hadoop_version'], notebook_config['dlab_ssh_user'],
+                   os.environ['notebook_hadoop_version'], notebook_config['datalab_ssh_user'],
                    notebook_config['spark_master_url'], notebook_config['key_path'],
                    notebook_config['notebook_ip'], notebook_config['spark_master_ip'])
         try:
@@ -108,7 +108,7 @@ if __name__ == "__main__":
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed installing Dataengine kernels.", str(err))
+        datalab.fab.append_result("Failed installing Dataengine kernels.", str(err))
         sys.exit(1)
 
     try:
@@ -120,7 +120,7 @@ if __name__ == "__main__":
                  "--cluster_name {3}" \
             .format(notebook_config['notebook_ip'],
                     notebook_config['key_path'],
-                    notebook_config['dlab_ssh_user'],
+                    notebook_config['datalab_ssh_user'],
                     notebook_config['cluster_name'])
         try:
             local("~/scripts/{0}.py {1}".format('common_configure_spark', params))
@@ -129,7 +129,7 @@ if __name__ == "__main__":
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to configure Spark.", str(err))
+        datalab.fab.append_result("Failed to configure Spark.", str(err))
         sys.exit(1)
 
     try:
@@ -139,6 +139,6 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         clear_resources()
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
index 5a1a6f0..e817ff1 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
@@ -27,9 +27,9 @@ import sys
 import os
 import argparse
 import traceback
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 from fabric.api import *
 
 parser = argparse.ArgumentParser()
@@ -46,7 +46,7 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
     try:
         # generating variables dictionary
-        dlab.actions_lib.create_aws_config_files()
+        datalab.actions_lib.create_aws_config_files()
         notebook_config = dict()
         notebook_config['service_base_name'] = os.environ['conf_service_base_name']
         notebook_config['project_name'] = os.environ['project_name']
@@ -54,17 +54,17 @@ if __name__ == "__main__":
         notebook_config['edge_name'] = '{}-{}-{}-edge'.format(notebook_config['service_base_name'],
                                                               notebook_config['project_name'],
                                                               notebook_config['endpoint_name'])
-        edge_status = dlab.meta_lib.get_instance_status(notebook_config['service_base_name'] + '-tag',
+        edge_status = datalab.meta_lib.get_instance_status(notebook_config['service_base_name'] + '-tag',
                                                         notebook_config['edge_name'])
         if edge_status != 'running':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
             print('ERROR: Edge node is unavailable! Aborting...')
-            notebook_config['ssn_hostname'] = dlab.meta_lib.get_instance_hostname(
+            notebook_config['ssn_hostname'] = datalab.meta_lib.get_instance_hostname(
                 '{}-tag'.format(notebook_config['service_base_name']),
                 '{}-ssn'.format(notebook_config['service_base_name']))
-            dlab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'],
+            datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'], os.environ['conf_os_user'],
                                          notebook_config['ssn_hostname'])
-            dlab.fab.append_result("Edge node is unavailable")
+            datalab.fab.append_result("Edge node is unavailable")
             sys.exit(1)
         print('Generating infrastructure names and tags')
         try:
@@ -99,9 +99,9 @@ if __name__ == "__main__":
             os.environ['application'], os.environ['notebook_image_name']) if (x != 'None' and x != '')
             else notebook_config['expected_image_name'])(str(os.environ.get('notebook_image_name')))
         print('Searching pre-configured images')
-        notebook_config['ami_id'] = dlab.meta_lib.get_ami_id(os.environ['aws_{}_image_name'.format(
+        notebook_config['ami_id'] = datalab.meta_lib.get_ami_id(os.environ['aws_{}_image_name'.format(
             os.environ['conf_os_family'])])
-        image_id = dlab.meta_lib.get_ami_id_by_name(notebook_config['notebook_image_name'], 'available')
+        image_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['notebook_image_name'], 'available')
         if image_id != '':
             notebook_config['ami_id'] = image_id
             print('Pre-configured image found. Using: {}'.format(notebook_config['ami_id']))
@@ -112,7 +112,7 @@ if __name__ == "__main__":
         tag = {"Key": notebook_config['tag_name'],
                "Value": "{}-{}-{}-subnet".format(notebook_config['service_base_name'], notebook_config['project_name'],
                                                  notebook_config['endpoint_name'])}
-        notebook_config['subnet_cidr'] = dlab.meta_lib.get_subnet_by_tag(tag)
+        notebook_config['subnet_cidr'] = datalab.meta_lib.get_subnet_by_tag(tag)
         keyfile_name = "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
 
         with open('/root/result.json', 'w') as f:
@@ -128,7 +128,7 @@ if __name__ == "__main__":
 
         print('Additional tags will be added: {}'.format(os.environ['conf_additional_tags']))
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
 
     # launching instance for notebook server
@@ -140,8 +140,8 @@ if __name__ == "__main__":
                  "--instance_disk_size {} --primary_disk_size {}" .format(
                   notebook_config['instance_name'], notebook_config['ami_id'], notebook_config['instance_type'],
                   notebook_config['key_name'],
-                  dlab.meta_lib.get_security_group_by_name(notebook_config['security_group_name']),
-                  dlab.meta_lib.get_subnet_by_cidr(notebook_config['subnet_cidr'], os.environ['aws_notebook_vpc_id']),
+                  datalab.meta_lib.get_security_group_by_name(notebook_config['security_group_name']),
+                  datalab.meta_lib.get_subnet_by_cidr(notebook_config['subnet_cidr'], os.environ['aws_notebook_vpc_id']),
                   notebook_config['role_profile_name'],
                   notebook_config['tag_name'], notebook_config['instance_name'], instance_class,
                   os.environ['notebook_disk_size'], notebook_config['primary_disk_size'])
@@ -152,6 +152,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to create instance.", str(err))
+        datalab.fab.append_result("Failed to create instance.", str(err))
         sys.exit(1)
 
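
Note: common_prepare_notebook.py above resolves the AMI in two steps: it first
takes the stock image for the configured OS family and then overrides it when a
pre-configured notebook image is already registered. A small sketch of that
fallback (the lookup callable and example names are hypothetical):

    def resolve_ami(stock_ami_id, find_custom_image, image_name):
        # Prefer a pre-baked notebook image; otherwise use the stock AMI.
        custom_id = find_custom_image(image_name)
        if custom_id:
            print('Pre-configured image found. Using: {}'.format(custom_id))
            return custom_id
        print('No pre-configured image found. Using default: {}'.format(stock_ami_id))
        return stock_ami_id

    print(resolve_ami('ami-base123', lambda name: '', 'sbn-project-jupyter-image'))
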
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_put_to_bucket.py b/infrastructure-provisioning/src/general/scripts/aws/common_put_to_bucket.py
index 8f433fe..8a94e4d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_put_to_bucket.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_put_to_bucket.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import argparse
-from dlab.actions_lib import *
+from datalab.actions_lib import *
 import sys
 
 parser = argparse.ArgumentParser()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py
index 826bff4..a1f91f0 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py
@@ -25,8 +25,8 @@ import os
 import sys
 import argparse
 from fabric.api import *
-from dlab.fab import find_cluster_kernels
-from dlab.actions_lib import *
+from datalab.fab import find_cluster_kernels
+from datalab.actions_lib import *
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py b/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py
index 3fd8adf..16f0039 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py
@@ -24,9 +24,9 @@
 
 import argparse
 from fabric.api import *
-from dlab.actions_lib import *
-from dlab.meta_lib import *
-from dlab.fab import *
+from datalab.actions_lib import *
+from datalab.meta_lib import *
+from datalab.fab import *
 import json
 
 parser = argparse.ArgumentParser()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
index d153082..a194955 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
@@ -24,9 +24,9 @@
 import logging
 import json
 import sys
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import traceback
 import os
 import uuid
@@ -42,7 +42,7 @@ if __name__ == "__main__":
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     # generating variables dictionary
-    dlab.actions_lib.create_aws_config_files()
+    datalab.actions_lib.create_aws_config_files()
     print('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
@@ -55,10 +55,10 @@ if __name__ == "__main__":
         params = "--tag_name {} --nb_tag_value {}".format(notebook_config['tag_name'], notebook_config['notebook_name'])
         try:
             print("Starting notebook")
-            dlab.actions_lib.start_ec2(notebook_config['tag_name'], notebook_config['notebook_name'])
+            datalab.actions_lib.start_ec2(notebook_config['tag_name'], notebook_config['notebook_name'])
         except Exception as err:
             traceback.print_exc()
-            dlab.fab.append_result("Failed to start notebook.", str(err))
+            datalab.fab.append_result("Failed to start notebook.", str(err))
             raise Exception
     except:
         sys.exit(1)
@@ -66,7 +66,7 @@ if __name__ == "__main__":
     try:
         logging.info('[SETUP USER GIT CREDENTIALS]')
         print('[SETUP USER GIT CREDENTIALS]')
-        notebook_config['notebook_ip'] = dlab.meta_lib.get_instance_ip_address(
+        notebook_config['notebook_ip'] = datalab.meta_lib.get_instance_ip_address(
             notebook_config['tag_name'], notebook_config['notebook_name']).get('Private')
         notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
@@ -75,7 +75,7 @@ if __name__ == "__main__":
             local("~/scripts/{}.py {}".format('manage_git_creds', params))
         except Exception as err:
             traceback.print_exc()
-            dlab.fab.append_result("Failed to setup git credentials.", str(err))
+            datalab.fab.append_result("Failed to setup git credentials.", str(err))
             raise Exception
     except:
         sys.exit(1)
@@ -89,15 +89,15 @@ if __name__ == "__main__":
             local("~/scripts/{}.py {}".format('update_inactivity_on_start', params))
         except Exception as err:
             traceback.print_exc()
-            dlab.fab.append_result("Failed to update last activity time.", str(err))
+            datalab.fab.append_result("Failed to update last activity time.", str(err))
             raise Exception
     except:
         sys.exit(1)
 
     try:
-        ip_address = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
+        ip_address = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
                                                            notebook_config['notebook_name']).get('Private')
-        dns_name = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'], notebook_config['notebook_name'])
+        dns_name = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'], notebook_config['notebook_name'])
         print('[SUMMARY]')
         logging.info('[SUMMARY]')
         print("Instance name: {}".format(notebook_config['notebook_name']))
@@ -112,7 +112,7 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         sys.exit(1)
 
 
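
Note: get_instance_ip_address(...) above is consumed with .get('Private'), i.e. it
returns a mapping of address kinds rather than a bare string. A hedged boto3 sketch
of a tag-based lookup with that shape (an assumption about the helper, not the
project's implementation):

    import boto3

    def get_instance_ip_address(tag_key, tag_value):
        # Find one running instance by tag and return both address kinds.
        reservations = boto3.client('ec2').describe_instances(
            Filters=[{'Name': 'tag:{}'.format(tag_key), 'Values': [tag_value]},
                     {'Name': 'instance-state-name', 'Values': ['running']}]
        )['Reservations']
        instance = reservations[0]['Instances'][0]
        return {'Private': instance.get('PrivateIpAddress'),
                'Public': instance.get('PublicIpAddress')}
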
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_stop_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_stop_notebook.py
index 679d4eb..cb6b213 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_stop_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_stop_notebook.py
@@ -24,9 +24,9 @@
 import logging
 import json
 import sys
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 from fabric.api import *
 import traceback
 import os
@@ -39,7 +39,7 @@ import sys
 def stop_notebook(nb_tag_value, bucket_name, tag_name, ssh_user, key_path):
     print('Terminating EMR cluster and cleaning EMR config from S3 bucket')
     try:
-        clusters_list = dlab.meta_lib.get_emr_list(nb_tag_value, 'Value')
+        clusters_list = datalab.meta_lib.get_emr_list(nb_tag_value, 'Value')
         if clusters_list:
             for cluster_id in clusters_list:
                 computational_name = ''
@@ -51,11 +51,11 @@ def stop_notebook(nb_tag_value, bucket_name, tag_name, ssh_user, key_path):
                 for tag in cluster.get('Tags'):
                     if tag.get('Key') == 'ComputationalName':
                         computational_name = tag.get('Value')
-                dlab.actions_lib.s3_cleanup(bucket_name, emr_name, os.environ['project_name'])
+                datalab.actions_lib.s3_cleanup(bucket_name, emr_name, os.environ['project_name'])
                 print("The bucket {} has been cleaned successfully".format(bucket_name))
-                dlab.actions_lib.terminate_emr(cluster_id)
+                datalab.actions_lib.terminate_emr(cluster_id)
                 print("The EMR cluster {} has been terminated successfully".format(emr_name))
-                dlab.actions_lib.remove_kernels(emr_name, tag_name, nb_tag_value, ssh_user, key_path, emr_version,
+                datalab.actions_lib.remove_kernels(emr_name, tag_name, nb_tag_value, ssh_user, key_path, emr_version,
                                                 computational_name)
                 print("{} kernels have been removed from notebook successfully".format(emr_name))
         else:
@@ -67,22 +67,22 @@ def stop_notebook(nb_tag_value, bucket_name, tag_name, ssh_user, key_path):
     try:
         cluster_list = []
         master_ids = []
-        cluster_instances_list = dlab.meta_lib.get_ec2_list('dataengine_notebook_name', nb_tag_value)
+        cluster_instances_list = datalab.meta_lib.get_ec2_list('dataengine_notebook_name', nb_tag_value)
         for instance in cluster_instances_list:
             for tag in instance.tags:
                 if tag['Key'] == 'Type' and tag['Value'] == 'master':
                     master_ids.append(instance.id)
         for id in master_ids:
-            for tag in dlab.meta_lib.get_instance_attr(id, 'tags'):
+            for tag in datalab.meta_lib.get_instance_attr(id, 'tags'):
                 if tag['Key'] == 'Name':
                     cluster_list.append(tag['Value'].replace(' ', '')[:-2])
-        dlab.actions_lib.stop_ec2('dataengine_notebook_name', nb_tag_value)
+        datalab.actions_lib.stop_ec2('dataengine_notebook_name', nb_tag_value)
     except:
         sys.exit(1)
 
     print("Stopping notebook")
     try:
-        dlab.actions_lib.stop_ec2(tag_name, nb_tag_value)
+        datalab.actions_lib.stop_ec2(tag_name, nb_tag_value)
     except:
         sys.exit(1)
 
@@ -96,7 +96,7 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
 
     # generating variables dictionary
-    dlab.actions_lib.create_aws_config_files()
+    datalab.actions_lib.create_aws_config_files()
     print('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
@@ -117,7 +117,7 @@ if __name__ == "__main__":
                       os.environ['conf_os_user'], notebook_config['key_path'])
     except Exception as err:
         print('Error: {0}'.format(err))
-        dlab.fab.append_result("Failed to stop notebook.", str(err))
+        datalab.fab.append_result("Failed to stop notebook.", str(err))
         sys.exit(1)
 
 
@@ -130,6 +130,6 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         sys.exit(1)
 
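
Note: stop_notebook() above tears resources down in dependency order: for each EMR
cluster tagged to the notebook it cleans the cluster's S3 configs, terminates the
cluster, and strips its kernels, and only then stops the notebook and any standalone
data engine instances. The skeleton of that loop, with placeholder helpers:

    def s3_cleanup(bucket, cluster_name):
        print('cleaning configs for {} in s3://{}'.format(cluster_name, bucket))

    def terminate_emr(cluster_id):
        print('terminating EMR cluster {}'.format(cluster_id))

    def remove_kernels(cluster_name):
        print('removing {} kernels from the notebook'.format(cluster_name))

    def stop_notebook(clusters, bucket_name):
        # Clusters first, notebook instance last.
        for cluster in clusters:
            s3_cleanup(bucket_name, cluster['name'])
            terminate_emr(cluster['id'])
            remove_kernels(cluster['name'])
        print('stopping notebook instance')

    stop_notebook([{'id': 'j-ABC123', 'name': 'demo-emr'}], 'demo-bucket')
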
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook.py
index c199089..4a7be6e 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook.py
@@ -24,9 +24,9 @@
 import logging
 import json
 import sys
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import traceback
 import os
 import boto3
@@ -36,7 +36,7 @@ import uuid
 def terminate_nb(nb_tag_value, bucket_name, tag_name):
     print('Terminating EMR cluster and cleaning EMR config from S3 bucket')
     try:
-        clusters_list = dlab.meta_lib.get_emr_list(nb_tag_value, 'Value')
+        clusters_list = datalab.meta_lib.get_emr_list(nb_tag_value, 'Value')
         if clusters_list:
             for cluster_id in clusters_list:
                 client = boto3.client('emr')
@@ -44,10 +44,10 @@ def terminate_nb(nb_tag_value, bucket_name, tag_name):
                 cluster = cluster.get("Cluster")
                 emr_name = cluster.get('Name')
                 print('Cleaning bucket from configs for cluster {}'.format(emr_name))
-                dlab.actions_lib.s3_cleanup(bucket_name, emr_name, os.environ['project_name'])
+                datalab.actions_lib.s3_cleanup(bucket_name, emr_name, os.environ['project_name'])
                 print("The bucket {} has been cleaned successfully".format(bucket_name))
                 print('Terminating cluster {}'.format(emr_name))
-                dlab.actions_lib.terminate_emr(cluster_id)
+                datalab.actions_lib.terminate_emr(cluster_id)
                 print("The EMR cluster {} has been terminated successfully".format(emr_name))
         else:
             print("There are no EMR clusters to terminate.")
@@ -56,13 +56,13 @@ def terminate_nb(nb_tag_value, bucket_name, tag_name):
 
     print("Terminating data engine cluster")
     try:
-        dlab.actions_lib.remove_ec2('dataengine_notebook_name', nb_tag_value)
+        datalab.actions_lib.remove_ec2('dataengine_notebook_name', nb_tag_value)
     except:
         sys.exit(1)
 
     print("Terminating notebook")
     try:
-        dlab.actions_lib.remove_ec2(tag_name, nb_tag_value)
+        datalab.actions_lib.remove_ec2(tag_name, nb_tag_value)
     except:
         sys.exit(1)
 
@@ -75,7 +75,7 @@ if __name__ == "__main__":
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     # generating variables dictionary
-    dlab.actions_lib.create_aws_config_files()
+    datalab.actions_lib.create_aws_config_files()
     print('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
@@ -95,7 +95,7 @@ if __name__ == "__main__":
             terminate_nb(notebook_config['notebook_name'], notebook_config['bucket_name'], notebook_config['tag_name'])
         except Exception as err:
             traceback.print_exc()
-            dlab.fab.append_result("Failed to terminate notebook.", str(err))
+            datalab.fab.append_result("Failed to terminate notebook.", str(err))
             raise Exception
     except:
         sys.exit(1)
@@ -109,5 +109,5 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook_image.py b/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook_image.py
index 3da4f63..3a1a1b6 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook_image.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook_image.py
@@ -21,9 +21,9 @@
 #
 # ******************************************************************************
 
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import sys
 import json
 import os
@@ -31,13 +31,13 @@ import os
 
 if __name__ == "__main__":
     try:
-        dlab.actions_lib.create_aws_config_files()
+        datalab.actions_lib.create_aws_config_files()
         image_conf = dict()
         image_conf['full_image_name'] = os.environ['notebook_image_name']
 
-        image_id = dlab.meta_lib.get_ami_id_by_name(image_conf['full_image_name'], 'available')
+        image_id = datalab.meta_lib.get_ami_id_by_name(image_conf['full_image_name'], 'available')
         if image_id != '':
-            dlab.actions_lib.deregister_image(image_conf['full_image_name'])
+            datalab.actions_lib.deregister_image(image_conf['full_image_name'])
 
             with open("/root/result.json", 'w') as result:
                 res = {"notebook_image_name": image_conf['full_image_name'],
@@ -45,5 +45,5 @@ if __name__ == "__main__":
                        "Action": "Delete existing notebook image"}
                 result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Failed to delete existing notebook image", str(err))
+        datalab.fab.append_result("Failed to delete existing notebook image", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
index 147247a..77e4f45 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
@@ -24,16 +24,16 @@
 import json
 import time
 from fabric.api import *
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import traceback
 import sys
 import os
 import logging
 import argparse
 import multiprocessing
-from dlab.common_lib import manage_pkg
+from datalab.common_lib import manage_pkg
 
 
 parser = argparse.ArgumentParser()
@@ -44,8 +44,8 @@ args = parser.parse_args()
 def configure_dataengine_service(instance, emr_conf):
     emr_conf['instance_ip'] = instance.get('PrivateIpAddress')
     try:
-        logging.info('[CREATING DLAB SSH USER ON DATAENGINE SERVICE]')
-        print('[CREATING DLAB SSH USER ON DATAENGINE SERVICE]')
+        logging.info('[CREATING DATA LAB SSH USER ON DATAENGINE SERVICE]')
+        print('[CREATING DATA LAB SSH USER ON DATAENGINE SERVICE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (emr_conf['instance_ip'], emr_conf['key_path'], emr_conf['initial_user'],
              emr_conf['os_user'], emr_conf['sudo_group'])
@@ -55,8 +55,8 @@ def configure_dataengine_service(instance, emr_conf):
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to create dlab ssh user.", str(err))
-        dlab.actions_lib.terminate_emr(emr_conf['cluster_id'])
+        datalab.fab.append_result("Failed to create Data Lab ssh user.", str(err))
+        datalab.actions_lib.terminate_emr(emr_conf['cluster_id'])
         sys.exit(1)
 
     # configuring proxy on Data Engine service
@@ -73,15 +73,15 @@ def configure_dataengine_service(instance, emr_conf):
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure proxy.", str(err))
-        dlab.actions_lib.terminate_emr(emr_conf['cluster_id'])
+        datalab.fab.append_result("Failed to configure proxy.", str(err))
+        datalab.actions_lib.terminate_emr(emr_conf['cluster_id'])
         sys.exit(1)
 
     try:
         logging.info('[CONFIGURE DATAENGINE SERVICE]')
         print('[CONFIGURE DATAENGINE SERVICE]')
         try:
-            dlab.fab.configure_data_engine_service_pip(emr_conf['instance_ip'], emr_conf['os_user'],
+            datalab.fab.configure_data_engine_service_pip(emr_conf['instance_ip'], emr_conf['os_user'],
                                                        emr_conf['key_path'], True)
             env['connection_attempts'] = 100
             env.key_filename = emr_conf['key_path']
@@ -93,8 +93,8 @@ def configure_dataengine_service(instance, emr_conf):
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure dataengine service.", str(err))
-        dlab.actions_lib.terminate_emr(emr_conf['cluster_id'])
+        datalab.fab.append_result("Failed to configure dataengine service.", str(err))
+        datalab.actions_lib.terminate_emr(emr_conf['cluster_id'])
         sys.exit(1)
 
 
@@ -132,11 +132,11 @@ def configure_dataengine_service(instance, emr_conf):
         try:
             local("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params))
         except:
-            dlab.fab.append_result("Failed edge reverse proxy template")
+            datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed edge reverse proxy template", str(err))
-        dlab.actions_lib.terminate_emr(emr_conf['cluster_id'])
+        datalab.fab.append_result("Failed edge reverse proxy template", str(err))
+        datalab.actions_lib.terminate_emr(emr_conf['cluster_id'])
         sys.exit(1)
 
     try:
@@ -151,8 +151,8 @@ def configure_dataengine_service(instance, emr_conf):
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing users key", str(err))
-        dlab.actions_lib.terminate_emr(emr_conf['cluster_id'])
+        datalab.fab.append_result("Failed installing users key", str(err))
+        datalab.actions_lib.terminate_emr(emr_conf['cluster_id'])
         sys.exit(1)
 
 
@@ -165,7 +165,7 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
 
     try:
-        dlab.actions_lib.create_aws_config_files()
+        datalab.actions_lib.create_aws_config_files()
         print('Generating infrastructure names and tags')
         emr_conf = dict()
         if 'exploratory_name' in os.environ:
@@ -187,7 +187,7 @@ if __name__ == "__main__":
         emr_conf['master_instance_type'] = os.environ['emr_master_instance_type']
         emr_conf['slave_instance_type'] = os.environ['emr_slave_instance_type']
         emr_conf['instance_count'] = os.environ['emr_instance_count']
-        emr_conf['notebook_ip'] = dlab.meta_lib.get_instance_ip_address(
+        emr_conf['notebook_ip'] = datalab.meta_lib.get_instance_ip_address(
             emr_conf['tag_name'], os.environ['notebook_instance_name']).get('Private')
         emr_conf['network_type'] = os.environ['conf_network_type']
         emr_conf['role_service_name'] = os.environ['emr_service_role']
@@ -209,7 +209,7 @@ if __name__ == "__main__":
                                                                emr_conf['endpoint_name']).lower().replace('_', '-')
         tag = {"Key": "{}-tag".format(emr_conf['service_base_name']), "Value": "{}-{}-{}-subnet".format(
             emr_conf['service_base_name'], emr_conf['project_name'], emr_conf['endpoint_name'])}
-        emr_conf['subnet_cidr'] = dlab.meta_lib.get_subnet_by_tag(tag)
+        emr_conf['subnet_cidr'] = datalab.meta_lib.get_subnet_by_tag(tag)
         emr_conf['key_path'] = '{}/{}.pem'.format(os.environ['conf_key_dir'],
                                                   os.environ['conf_key_name'])
         emr_conf['all_ip_cidr'] = '0.0.0.0/0'
@@ -217,23 +217,23 @@ if __name__ == "__main__":
                                                                                    emr_conf['project_name'],
                                                                                    emr_conf['endpoint_name'])
         emr_conf['vpc_id'] = os.environ['aws_vpc_id']
-        emr_conf['cluster_id'] = dlab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name'])
-        emr_conf['cluster_instances'] = dlab.meta_lib.get_emr_instances_list(emr_conf['cluster_id'])
-        emr_conf['cluster_master_instances'] = dlab.meta_lib.get_emr_instances_list(emr_conf['cluster_id'], 'MASTER')
-        emr_conf['cluster_core_instances'] = dlab.meta_lib.get_emr_instances_list(emr_conf['cluster_id'], 'CORE')
+        emr_conf['cluster_id'] = datalab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name'])
+        emr_conf['cluster_instances'] = datalab.meta_lib.get_emr_instances_list(emr_conf['cluster_id'])
+        emr_conf['cluster_master_instances'] = datalab.meta_lib.get_emr_instances_list(emr_conf['cluster_id'], 'MASTER')
+        emr_conf['cluster_core_instances'] = datalab.meta_lib.get_emr_instances_list(emr_conf['cluster_id'], 'CORE')
         emr_conf['edge_instance_name'] = '{0}-{1}-{2}-edge'.format(emr_conf['service_base_name'],
                                                                    emr_conf['project_name'], emr_conf['endpoint_name'])
-        emr_conf['edge_instance_hostname'] = dlab.meta_lib.get_instance_private_ip_address(
+        emr_conf['edge_instance_hostname'] = datalab.meta_lib.get_instance_private_ip_address(
             emr_conf['tag_name'], emr_conf['edge_instance_name'])
-        emr_conf['edge_instance_hostname'] = dlab.meta_lib.get_instance_hostname(emr_conf['tag_name'],
+        emr_conf['edge_instance_hostname'] = datalab.meta_lib.get_instance_hostname(emr_conf['tag_name'],
                                                                                  emr_conf['edge_instance_name'])
         emr_conf['user_keyname'] = emr_conf['project_name']
         emr_conf['os_user'] = os.environ['conf_os_user']
         emr_conf['initial_user'] = 'ec2-user'
         emr_conf['sudo_group'] = 'wheel'
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary", str(err))
-        dlab.actions_lib.terminate_emr(emr_conf['cluster_id'])
+        datalab.fab.append_result("Failed to generate variables dictionary", str(err))
+        datalab.actions_lib.terminate_emr(emr_conf['cluster_id'])
         sys.exit(1)
 
     try:
@@ -262,7 +262,7 @@ if __name__ == "__main__":
         print('[SUMMARY]')
         print("Service base name: {}".format(emr_conf['service_base_name']))
         print("Cluster name: {}".format(emr_conf['cluster_name']))
-        print("Cluster id: {}".format(dlab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name'])))
+        print("Cluster id: {}".format(datalab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name'])))
         print("Key name: {}".format(emr_conf['key_name']))
         print("Region: {}".format(emr_conf['region']))
         print("EMR version: {}".format(emr_conf['release_label']))
@@ -273,7 +273,7 @@ if __name__ == "__main__":
         print("Bucket name: {}".format(emr_conf['bucket_name']))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": emr_conf['cluster_name'],
-                   "instance_id": dlab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name']),
+                   "instance_id": datalab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name']),
                    "key_name": emr_conf['key_name'],
                    "user_own_bucket_name": emr_conf['bucket_name'],
                    "Action": "Create new EMR cluster",
@@ -286,6 +286,6 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
-        dlab.actions_lib.terminate_emr(emr_conf['cluster_id'])
+        datalab.fab.append_result("Error with writing results", str(err))
+        datalab.actions_lib.terminate_emr(emr_conf['cluster_id'])
         sys.exit(1)
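
Note: dataengine-service_configure.py above imports multiprocessing and defines
configure_dataengine_service(instance, emr_conf) per node, so master and core
instances can be configured independently rather than strictly one by one. A
minimal sketch of fanning that step out over the node list (assuming the per-node
function is safe to run in separate processes; names and IPs are illustrative):

    import multiprocessing

    def configure_node(private_ip):
        # Stand-in for configure_dataengine_service(instance, emr_conf).
        print('configuring {}'.format(private_ip))

    if __name__ == '__main__':
        nodes = ['10.0.1.10', '10.0.1.11', '10.0.1.12']
        jobs = [multiprocessing.Process(target=configure_node, args=(ip,))
                for ip in nodes]
        for job in jobs:
            job.start()
        for job in jobs:
            job.join()
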
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py
index 0cdba1f..df9d989 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py
@@ -28,8 +28,8 @@ import re
 import time
 import sys
 from fabric.api import *
-from dlab.meta_lib import *
-from dlab.actions_lib import *
+from datalab.meta_lib import *
+from datalab.actions_lib import *
 import json
 import traceback
 import logging
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py
index a8a1e47..a69b140 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py
@@ -25,9 +25,9 @@ import os
 import sys
 import logging
 import traceback
-from dlab.fab import *
-from dlab.meta_lib import *
-from dlab.actions_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
+from datalab.actions_lib import *
 from fabric.api import *
 import multiprocessing
 
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py
index 9360375..a9c5364 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py
@@ -25,9 +25,9 @@ import os
 import sys
 import logging
 import traceback
-from dlab.fab import *
-from dlab.meta_lib import *
-from dlab.actions_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
+from datalab.actions_lib import *
 from fabric.api import *
 
 if __name__ == "__main__":
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py
index 3a08ba4..ecb3ed1 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py
@@ -24,9 +24,9 @@
 import json
 import time
 from fabric.api import *
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import traceback
 import argparse
 import sys
@@ -54,24 +54,24 @@ if __name__ == "__main__":
             emr_conf['exploratory_name'] = ''
         if os.path.exists('/response/.emr_creating_{}'.format(emr_conf['exploratory_name'])):
             time.sleep(30)
-        dlab.actions_lib.create_aws_config_files()
+        datalab.actions_lib.create_aws_config_files()
         emr_conf['service_base_name'] = os.environ['conf_service_base_name']
         emr_conf['project_name'] = os.environ['project_name']
         emr_conf['endpoint_name'] = os.environ['endpoint_name']
-        edge_status = dlab.meta_lib.get_instance_status(
+        edge_status = datalab.meta_lib.get_instance_status(
             '{}-tag'.format(emr_conf['service_base_name']),
             '{0}-{1}-{2}-edge'.format(emr_conf['service_base_name'], emr_conf['project_name'],
                                       emr_conf['endpoint_name']))
         if edge_status != 'running':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
             print('ERROR: Edge node is unavailable! Aborting...')
-            ssn_hostname = dlab.meta_lib.get_instance_hostname(
+            ssn_hostname = datalab.meta_lib.get_instance_hostname(
                 emr_conf['service_base_name'] + '-tag',
                 emr_conf['service_base_name'] + '-ssn')
-            dlab.fab.put_resource_status('edge', 'Unavailable',
-                                         os.environ['ssn_dlab_path'],
+            datalab.fab.put_resource_status('edge', 'Unavailable',
+                                         os.environ['ssn_datalab_path'],
                                          os.environ['conf_os_user'], ssn_hostname)
-            dlab.fab.append_result("Edge node is unavailable")
+            datalab.fab.append_result("Edge node is unavailable")
             sys.exit(1)
         print('Generating infrastructure names and tags')
         if 'computational_name' in os.environ:
@@ -91,7 +91,7 @@ if __name__ == "__main__":
         emr_conf['master_instance_type'] = os.environ['emr_master_instance_type']
         emr_conf['slave_instance_type'] = os.environ['emr_slave_instance_type']
         emr_conf['instance_count'] = os.environ['emr_instance_count']
-        emr_conf['notebook_ip'] = dlab.meta_lib.get_instance_ip_address(
+        emr_conf['notebook_ip'] = datalab.meta_lib.get_instance_ip_address(
             emr_conf['tag_name'], os.environ['notebook_instance_name']).get('Private')
         emr_conf['role_service_name'] = os.environ['emr_service_role']
         emr_conf['role_ec2_name'] = os.environ['emr_ec2_role']
@@ -122,7 +122,7 @@ if __name__ == "__main__":
         tag = {"Key": "{}-tag".format(emr_conf['service_base_name']),
                "Value": "{}-{}-{}-subnet".format(emr_conf['service_base_name'], emr_conf['project_name'],
                                                  emr_conf['endpoint_name'])}
-        emr_conf['subnet_cidr'] = dlab.meta_lib.get_subnet_by_tag(tag)
+        emr_conf['subnet_cidr'] = datalab.meta_lib.get_subnet_by_tag(tag)
         emr_conf['key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         emr_conf['all_ip_cidr'] = '0.0.0.0/0'
         emr_conf['additional_emr_sg_name'] = '{}-{}-{}-de-se-additional-sg'\
@@ -131,14 +131,14 @@ if __name__ == "__main__":
         emr_conf['vpc2_id'] = os.environ['aws_notebook_vpc_id']
         emr_conf['provision_instance_ip'] = None
         try:
-            emr_conf['provision_instance_ip'] = dlab.meta_lib.get_instance_ip_address(
+            emr_conf['provision_instance_ip'] = datalab.meta_lib.get_instance_ip_address(
                 emr_conf['tag_name'], '{0}-{1}-endpoint'.format(emr_conf['service_base_name'],
                                                                 emr_conf['endpoint_name'])).get('Private') + "/32"
         except:
-            emr_conf['provision_instance_ip'] = dlab.meta_lib.get_instance_ip_address(
+            emr_conf['provision_instance_ip'] = datalab.meta_lib.get_instance_ip_address(
                 emr_conf['tag_name'], '{0}-ssn'.format(emr_conf['service_base_name'])).get('Private') + "/32"
         if os.environ['emr_slave_instance_spot'] == 'True':
-            ondemand_price = float(dlab.meta_lib.get_ec2_price(emr_conf['slave_instance_type'], emr_conf['region']))
+            ondemand_price = float(datalab.meta_lib.get_ec2_price(emr_conf['slave_instance_type'], emr_conf['region']))
             emr_conf['slave_bid_price'] = (ondemand_price * int(os.environ['emr_slave_instance_spot_pct_price'])) / 100
         else:
             emr_conf['slave_bid_price'] = 0
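
The hunk above bids a fixed percentage of the on-demand rate for spot slaves. A worked example with illustrative numbers (the real rate comes from datalab.meta_lib.get_ec2_price and the percentage from emr_slave_instance_spot_pct_price):

    # Worked example of the bid-price arithmetic; values are made up.
    ondemand_price = 0.20            # $/hour for the slave instance type
    spot_pct = 70                    # percent of on-demand to bid
    slave_bid_price = (ondemand_price * spot_pct) / 100
    print(slave_bid_price)           # 0.14, i.e. bid 70% of the on-demand rate
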
@@ -147,7 +147,7 @@ if __name__ == "__main__":
         else:
             emr_conf['emr_timeout'] = "1200"
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary", str(err))
+        datalab.fab.append_result("Failed to generate variables dictionary", str(err))
         sys.exit(1)
 
     print("Will create exploratory environment with edge node as access point as following: {}".format(
@@ -159,11 +159,11 @@ if __name__ == "__main__":
         json.dump(data, f)
 
     try:
-        dlab.meta_lib.emr_waiter(emr_conf['tag_name'], os.environ['notebook_instance_name'])
+        datalab.meta_lib.emr_waiter(emr_conf['tag_name'], os.environ['notebook_instance_name'])
         local('touch /response/.emr_creating_{}'.format(emr_conf['exploratory_name']))
     except Exception as err:
         traceback.print_exc()
-        dlab.fab.append_result("EMR waiter fail.", str(err))
+        datalab.fab.append_result("EMR waiter fail.", str(err))
         sys.exit(1)
 
     with open('/root/result.json', 'w') as f:
@@ -173,8 +173,8 @@ if __name__ == "__main__":
     logging.info('[CREATING ADDITIONAL SECURITY GROUPS FOR EMR]')
     print("[CREATING ADDITIONAL SECURITY GROUPS FOR EMR]")
     try:
-        edge_group_id = dlab.meta_lib.check_security_group(emr_conf['edge_security_group_name'])
-        cluster_sg_ingress = dlab.meta_lib.format_sg([
+        edge_group_id = datalab.meta_lib.check_security_group(emr_conf['edge_security_group_name'])
+        cluster_sg_ingress = datalab.meta_lib.format_sg([
             {
                 "IpProtocol": "-1",
                 "IpRanges": [{"CidrIp": emr_conf['subnet_cidr']}],
@@ -194,7 +194,7 @@ if __name__ == "__main__":
                 "PrefixListIds": []
             }
         ])
-        cluster_sg_egress = dlab.meta_lib.format_sg([
+        cluster_sg_egress = datalab.meta_lib.format_sg([
             {
                 "IpProtocol": "-1",
                 "IpRanges": [{"CidrIp": emr_conf['subnet_cidr']}],
@@ -249,7 +249,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to create sg.", str(err))
+        datalab.fab.append_result("Failed to create sg.", str(err))
         sys.exit(1)
 
     local("echo Waiting for changes to propagate; sleep 10")
@@ -312,8 +312,8 @@ if __name__ == "__main__":
         keyfile_name = "{}{}.pem".format(os.environ['conf_key_dir'], emr_conf['key_name'])
         local('rm /response/.emr_creating_{}'.format(emr_conf['exploratory_name']))
     except Exception as err:
-        dlab.fab.append_result("Failed to create EMR Cluster.", str(err))
+        datalab.fab.append_result("Failed to create EMR Cluster.", str(err))
         local('rm /response/.emr_creating_{}'.format(emr_conf['exploratory_name']))
-        emr_id = dlab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name'])
-        dlab.actions_lib.terminate_emr(emr_id)
+        emr_id = datalab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name'])
+        datalab.actions_lib.terminate_emr(emr_id)
         sys.exit(1)
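
The rule dictionaries built in the security-group hunks above already use the boto3 IpPermissions shape (IpProtocol, IpRanges, UserIdGroupPairs, PrefixListIds), so presumably datalab.meta_lib.format_sg only normalizes them before they reach EC2. A raw boto3 equivalent of one ingress rule, with placeholder identifiers:

    import boto3

    ec2 = boto3.client('ec2')
    ec2.authorize_security_group_ingress(
        GroupId='sg-0123456789abcdef0',                  # placeholder group id
        IpPermissions=[{
            'IpProtocol': '-1',                          # all protocols
            'IpRanges': [{'CidrIp': '172.31.32.0/24'}],  # e.g. the EMR subnet CIDR
            'UserIdGroupPairs': [],
            'PrefixListIds': [],
        }],
    )
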
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_terminate.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_terminate.py
index e9551e3..d6b86f5 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_terminate.py
@@ -21,9 +21,9 @@
 #
 # ******************************************************************************
 
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import boto3
 import logging
 import argparse
@@ -36,7 +36,7 @@ import json
 def terminate_emr_cluster(emr_name, bucket_name, tag_name, nb_tag_value, ssh_user, key_path):
     print('Terminating EMR cluster and cleaning EMR config from S3 bucket')
     try:
-        clusters_list = dlab.meta_lib.get_emr_list(emr_name, 'Value')
+        clusters_list = datalab.meta_lib.get_emr_list(emr_name, 'Value')
         if clusters_list:
             for cluster_id in clusters_list:
                 computational_name = ''
@@ -48,12 +48,12 @@ def terminate_emr_cluster(emr_name, bucket_name, tag_name, nb_tag_value, ssh_use
                 for tag in cluster.get('Tags'):
                     if tag.get('Key') == 'ComputationalName':
                         computational_name = tag.get('Value')
-                dlab.actions_lib.s3_cleanup(bucket_name, emr_name, os.environ['project_name'])
+                datalab.actions_lib.s3_cleanup(bucket_name, emr_name, os.environ['project_name'])
                 print("The bucket {} has been cleaned successfully".format(bucket_name))
-                dlab.actions_lib.terminate_emr(cluster_id)
+                datalab.actions_lib.terminate_emr(cluster_id)
                 print("The EMR cluster {} has been terminated successfully".format(emr_name))
                 print("Removing EMR kernels from notebook")
-                dlab.actions_lib.remove_kernels(emr_name, tag_name, nb_tag_value, ssh_user, key_path,
+                datalab.actions_lib.remove_kernels(emr_name, tag_name, nb_tag_value, ssh_user, key_path,
                                                 emr_version, computational_name)
         else:
             print("There are no EMR clusters to terminate.")
@@ -70,7 +70,7 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
 
     # generating variables dictionary
-    dlab.actions_lib.create_aws_config_files()
+    datalab.actions_lib.create_aws_config_files()
     print('Generating infrastructure names and tags')
     emr_conf = dict()
     emr_conf['service_base_name'] = (os.environ['conf_service_base_name'])
@@ -91,7 +91,7 @@ if __name__ == "__main__":
                                   emr_conf['notebook_name'], os.environ['conf_os_user'], emr_conf['key_path'])
         except Exception as err:
             traceback.print_exc()
-            dlab.fab.append_result("Failed to terminate EMR cluster.", str(err))
+            datalab.fab.append_result("Failed to terminate EMR cluster.", str(err))
             raise Exception
     except:
         sys.exit(1)
@@ -105,5 +105,5 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         sys.exit(1)
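
terminate_emr_cluster above resolves cluster ids from a name, cleans the bucket, terminates the cluster, and strips its kernels from the notebook. A minimal boto3 sketch of just the resolve-and-terminate part, assuming the name is carried in a tag value as get_emr_list's 'Value' argument suggests (pagination omitted; the real lookup is datalab.meta_lib.get_emr_list):

    import boto3

    emr = boto3.client('emr')

    def emr_cluster_ids_by_tag_value(value):
        ids = []
        for summary in emr.list_clusters(ClusterStates=['WAITING', 'RUNNING'])['Clusters']:
            tags = emr.describe_cluster(ClusterId=summary['Id'])['Cluster']['Tags']
            if any(value in tag.get('Value', '') for tag in tags):
                ids.append(summary['Id'])
        return ids

    for cluster_id in emr_cluster_ids_by_tag_value('datalab-prj-des'):   # placeholder name
        emr.terminate_job_flows(JobFlowIds=[cluster_id])
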
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
index e0a4f0c..44c702b 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
@@ -24,9 +24,9 @@
 import json
 import time
 from fabric.api import *
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import traceback
 import sys
 import os
@@ -38,13 +38,13 @@ import multiprocessing
 
 def configure_slave(slave_number, data_engine):
     slave_name = data_engine['slave_node_name'] + '{}'.format(slave_number + 1)
-    slave_hostname = dlab.meta_lib.get_instance_private_ip_address(data_engine['tag_name'], slave_name)
+    slave_hostname = datalab.meta_lib.get_instance_private_ip_address(data_engine['tag_name'], slave_name)
     try:
-        logging.info('[CREATING DLAB SSH USER ON SLAVE NODE]')
-        print('[CREATING DLAB SSH USER ON SLAVE NODE]')
+        logging.info('[CREATING DATA LAB SSH USER ON SLAVE NODE]')
+        print('[CREATING DATA LAB SSH USER ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             master_node_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], data_engine['key_name']),
-            data_engine['initial_user'], data_engine['dlab_ssh_user'], data_engine['sudo_group'])
+            data_engine['initial_user'], data_engine['datalab_ssh_user'], data_engine['sudo_group'])
 
         try:
             local("~/scripts/{}.py {}".format('create_ssh_user', params))
@@ -53,14 +53,14 @@ def configure_slave(slave_number, data_engine):
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to create ssh user on slave.", str(err))
+        datalab.fab.append_result("Failed to create ssh user on slave.", str(err))
         sys.exit(1)
 
     try:
         logging.info('[CLEANING INSTANCE FOR SLAVE NODE]')
         print('[CLEANING INSTANCE FOR SLAVE NODE]')
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
-            .format(slave_hostname, keyfile_name, data_engine['dlab_ssh_user'], os.environ['application'])
+            .format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
             local("~/scripts/{}.py {}".format('common_clean_instance', params))
         except:
@@ -68,7 +68,7 @@ def configure_slave(slave_number, data_engine):
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to clean slave instance.", str(err))
+        datalab.fab.append_result("Failed to clean slave instance.", str(err))
         sys.exit(1)
 
     try:
@@ -77,7 +77,7 @@ def configure_slave(slave_number, data_engine):
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(slave_hostname, slave_name, keyfile_name, json.dumps(additional_config),
-                    data_engine['dlab_ssh_user'])
+                    data_engine['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('common_configure_proxy', params))
         except:
@@ -85,14 +85,14 @@ def configure_slave(slave_number, data_engine):
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to configure proxy on slave.", str(err))
+        datalab.fab.append_result("Failed to configure proxy on slave.", str(err))
         sys.exit(1)
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON SLAVE NODE]')
         print('[INSTALLING PREREQUISITES ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
-            format(slave_hostname, keyfile_name, data_engine['dlab_ssh_user'], data_engine['region'],
+            format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
         try:
             local("~/scripts/{}.py {}".format('install_prerequisites', params))
@@ -101,7 +101,7 @@ def configure_slave(slave_number, data_engine):
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to install prerequisites on slave.", str(err))
+        datalab.fab.append_result("Failed to install prerequisites on slave.", str(err))
         sys.exit(1)
 
     try:
@@ -110,7 +110,7 @@ def configure_slave(slave_number, data_engine):
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --r_mirror {} --master_ip {} --node_type {}". \
             format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
-                   os.environ['notebook_hadoop_version'], data_engine['dlab_ssh_user'],
+                   os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
                    'slave')
         try:
@@ -120,7 +120,7 @@ def configure_slave(slave_number, data_engine):
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to configure slave node.", str(err))
+        datalab.fab.append_result("Failed to configure slave node.", str(err))
         sys.exit(1)
 
     try:
@@ -128,7 +128,7 @@ def configure_slave(slave_number, data_engine):
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": data_engine['user_keyname'], "user_keydir": os.environ['conf_key_dir']}
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
-            slave_hostname, keyfile_name, json.dumps(additional_config), data_engine['dlab_ssh_user'])
+            slave_hostname, keyfile_name, json.dumps(additional_config), data_engine['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('install_user_key', params))
         except:
@@ -136,15 +136,15 @@ def configure_slave(slave_number, data_engine):
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed install users key on slave node.", str(err))
+        datalab.fab.append_result("Failed install users key on slave node.", str(err))
         sys.exit(1)
 
 
 def clear_resources():
-    dlab.actions_lib.remove_ec2(data_engine['tag_name'], data_engine['master_node_name'])
+    datalab.actions_lib.remove_ec2(data_engine['tag_name'], data_engine['master_node_name'])
     for i in range(data_engine['instance_count'] - 1):
         slave_name = data_engine['slave_node_name'] + '{}'.format(i + 1)
-        dlab.actions_lib.remove_ec2(data_engine['tag_name'], slave_name)
+        datalab.actions_lib.remove_ec2(data_engine['tag_name'], slave_name)
 
 
 if __name__ == "__main__":
@@ -189,21 +189,21 @@ if __name__ == "__main__":
         tag = {"Key": data_engine['tag_name'],
                "Value": "{}-{}-{}-subnet".format(data_engine['service_base_name'], data_engine['project_name'],
                                                  data_engine['endpoint_name'])}
-        data_engine['subnet_cidr'] = dlab.meta_lib.get_subnet_by_tag(tag)
+        data_engine['subnet_cidr'] = datalab.meta_lib.get_subnet_by_tag(tag)
         data_engine['notebook_dataengine_role_profile_name'] = '{}-{}-{}-nb-de-profile' \
             .format(data_engine['service_base_name'], data_engine['project_name'], data_engine['endpoint_name'])
         data_engine['instance_count'] = int(os.environ['dataengine_instance_count'])
-        master_node_hostname = dlab.meta_lib.get_instance_hostname(data_engine['tag_name'],
+        master_node_hostname = datalab.meta_lib.get_instance_hostname(data_engine['tag_name'],
                                                                    data_engine['master_node_name'])
-        data_engine['dlab_ssh_user'] = os.environ['conf_os_user']
+        data_engine['datalab_ssh_user'] = os.environ['conf_os_user']
         data_engine['user_keyname'] = data_engine['project_name']
         keyfile_name = "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         edge_instance_name = '{0}-{1}-{2}-edge'.format(data_engine['service_base_name'],
                                                        data_engine['project_name'], data_engine['endpoint_name'])
-        edge_instance_hostname = dlab.meta_lib.get_instance_hostname(data_engine['tag_name'], edge_instance_name)
-        edge_instance_private_ip = dlab.meta_lib.get_instance_ip_address(data_engine['tag_name'],
+        edge_instance_hostname = datalab.meta_lib.get_instance_hostname(data_engine['tag_name'], edge_instance_name)
+        edge_instance_private_ip = datalab.meta_lib.get_instance_ip_address(data_engine['tag_name'],
                                                                          edge_instance_name).get('Private')
-        data_engine['edge_instance_hostname'] = dlab.meta_lib.get_instance_hostname(data_engine['tag_name'],
+        data_engine['edge_instance_hostname'] = datalab.meta_lib.get_instance_hostname(data_engine['tag_name'],
                                                                                     edge_instance_name)
         if os.environ['conf_os_family'] == 'debian':
             data_engine['initial_user'] = 'ubuntu'
@@ -220,15 +220,15 @@ if __name__ == "__main__":
         data_engine['master_node_name'] = data_engine['cluster_name'] + '-m'
         data_engine['slave_node_name'] = data_engine['cluster_name'] + '-s'
         clear_resources()
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
 
     try:
-        logging.info('[CREATING DLAB SSH USER ON MASTER NODE]')
-        print('[CREATING DLAB SSH USER ON MASTER NODE]')
+        logging.info('[CREATING DATA LAB SSH USER ON MASTER NODE]')
+        print('[CREATING DATA LAB SSH USER ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             master_node_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], data_engine['key_name']),
-            data_engine['initial_user'], data_engine['dlab_ssh_user'], data_engine['sudo_group'])
+            data_engine['initial_user'], data_engine['datalab_ssh_user'], data_engine['sudo_group'])
 
         try:
             local("~/scripts/{}.py {}".format('create_ssh_user', params))
@@ -237,14 +237,14 @@ if __name__ == "__main__":
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to create ssh user on master.", str(err))
+        datalab.fab.append_result("Failed to create ssh user on master.", str(err))
         sys.exit(1)
 
     try:
         logging.info('[CLEANING INSTANCE FOR MASTER NODE]')
         print('[CLEANING INSTANCE FOR MASTER NODE]')
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
-            .format(master_node_hostname, keyfile_name, data_engine['dlab_ssh_user'], os.environ['application'])
+            .format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
             local("~/scripts/{}.py {}".format('common_clean_instance', params))
         except:
@@ -252,7 +252,7 @@ if __name__ == "__main__":
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to clean master instance.", str(err))
+        datalab.fab.append_result("Failed to clean master instance.", str(err))
         sys.exit(1)
 
     try:
@@ -261,7 +261,7 @@ if __name__ == "__main__":
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(master_node_hostname, data_engine['master_node_name'], keyfile_name, json.dumps(additional_config),
-                    data_engine['dlab_ssh_user'])
+                    data_engine['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('common_configure_proxy', params))
         except:
@@ -269,14 +269,14 @@ if __name__ == "__main__":
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to configure proxy on master.", str(err))
+        datalab.fab.append_result("Failed to configure proxy on master.", str(err))
         sys.exit(1)
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON MASTER NODE]')
         print('[INSTALLING PREREQUISITES ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}".\
-            format(master_node_hostname, keyfile_name, data_engine['dlab_ssh_user'], data_engine['region'],
+            format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
         try:
             local("~/scripts/{}.py {}".format('install_prerequisites', params))
@@ -285,7 +285,7 @@ if __name__ == "__main__":
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed to install prerequisites on master.", str(err))
+        datalab.fab.append_result("Failed to install prerequisites on master.", str(err))
         sys.exit(1)
 
     try:
@@ -293,7 +293,7 @@ if __name__ == "__main__":
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": data_engine['user_keyname'], "user_keydir": os.environ['conf_key_dir']}
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
-            master_node_hostname, keyfile_name, json.dumps(additional_config), data_engine['dlab_ssh_user'])
+            master_node_hostname, keyfile_name, json.dumps(additional_config), data_engine['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('install_user_key', params))
         except:
@@ -301,7 +301,7 @@ if __name__ == "__main__":
             raise Exception
     except Exception as err:
         clear_resources()
-        dlab.fab.append_result("Failed install users key on master node.", str(err))
+        datalab.fab.append_result("Failed install users key on master node.", str(err))
         sys.exit(1)
 
     try:
@@ -310,7 +310,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --r_mirror {} --master_ip {} --node_type {}".\
             format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
-                   os.environ['notebook_hadoop_version'], data_engine['dlab_ssh_user'],
+                   os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
                    'master')
         try:
@@ -319,7 +319,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure master node", str(err))
+        datalab.fab.append_result("Failed to configure master node", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -335,14 +335,14 @@ if __name__ == "__main__":
             if job.exitcode != 0:
                 raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure slave nodes.", str(err))
+        datalab.fab.append_result("Failed to configure slave nodes.", str(err))
         clear_resources()
         sys.exit(1)
 
     try:
         print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
-        notebook_instance_ip = dlab.meta_lib.get_instance_private_ip_address('Name',
+        notebook_instance_ip = datalab.meta_lib.get_instance_private_ip_address('Name',
                                                                              os.environ['notebook_instance_name'])
         additional_info = {
             "computational_name": data_engine['computational_name'],
@@ -361,22 +361,22 @@ if __name__ == "__main__":
                  "--additional_info '{}'"\
             .format(edge_instance_hostname,
                     keyfile_name,
-                    data_engine['dlab_ssh_user'],
+                    data_engine['datalab_ssh_user'],
                     'spark',
                     data_engine['exploratory_name'],
                     json.dumps(additional_info))
         try:
             local("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params))
         except:
-            dlab.fab.append_result("Failed edge reverse proxy template")
+            datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure reverse proxy.", str(err))
+        datalab.fab.append_result("Failed to configure reverse proxy.", str(err))
         clear_resources()
         sys.exit(1)
 
     try:
-        ip_address = dlab.meta_lib.get_instance_ip_address(data_engine['tag_name'],
+        ip_address = datalab.meta_lib.get_instance_ip_address(data_engine['tag_name'],
                                                            data_engine['master_node_name']).get('Private')
         spark_master_url = "http://" + ip_address + ":8080"
         spark_master_access_url = "https://{}/{}_{}/".format(data_engine['edge_instance_hostname'],
@@ -392,7 +392,7 @@ if __name__ == "__main__":
         print("Instance count: {}".format(str(data_engine['instance_count'])))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": data_engine['cluster_name'],
-                   "instance_id": dlab.meta_lib.get_instance_by_name(data_engine['tag_name'],
+                   "instance_id": datalab.meta_lib.get_instance_by_name(data_engine['tag_name'],
                                                                      data_engine['master_node_name']),
                    "key_name": data_engine['key_name'],
                    "Action": "Create new Data Engine",
@@ -405,6 +405,6 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         clear_resources()
         sys.exit(1)
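
dataengine_configure.py drives configure_slave once per worker through multiprocessing and treats a non-zero Process.exitcode as failure, which is why each helper ends in sys.exit(1) on error. The pattern in isolation, with the per-node work stubbed out:

    import multiprocessing
    import sys

    def configure_slave(slave_number):
        # per-node SSH/configuration work goes here; sys.exit(1) on failure
        sys.exit(0)

    if __name__ == '__main__':
        jobs = []
        for i in range(3):                   # instance_count - 1 in the script
            job = multiprocessing.Process(target=configure_slave, args=(i,))
            jobs.append(job)
            job.start()
        for job in jobs:
            job.join()
        if any(job.exitcode != 0 for job in jobs):
            raise Exception('At least one slave node failed to configure')
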
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py
index ad19f7a..b9d5cb8 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py
@@ -24,9 +24,9 @@
 import json
 import time
 from fabric.api import *
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import traceback
 import sys
 import os
@@ -44,22 +44,22 @@ if __name__ == "__main__":
                         level=logging.INFO,
                         filename=local_log_filepath)
     try:
-        dlab.actions_lib.create_aws_config_files()
+        datalab.actions_lib.create_aws_config_files()
         data_engine = dict()
         data_engine['service_base_name'] = (os.environ['conf_service_base_name'])
         data_engine['project_name'] = os.environ['project_name']
         data_engine['endpoint_name'] = os.environ['endpoint_name']
-        edge_status = dlab.meta_lib.get_instance_status(
+        edge_status = datalab.meta_lib.get_instance_status(
             data_engine['service_base_name'] + '-tag', '{0}-{1}-{2}-edge'.format(
                 data_engine['service_base_name'], data_engine['project_name'], data_engine['endpoint_name']))
         if edge_status != 'running':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
             print('ERROR: Edge node is unavailable! Aborting...')
-            ssn_hostname = dlab.meta_lib.get_instance_hostname(data_engine['service_base_name'] + '-tag',
+            ssn_hostname = datalab.meta_lib.get_instance_hostname(data_engine['service_base_name'] + '-tag',
                                                                data_engine['service_base_name'] + '-ssn')
-            dlab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'],
+            datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'], os.environ['conf_os_user'],
                                          ssn_hostname)
-            dlab.fab.append_result("Edge node is unavailable")
+            datalab.fab.append_result("Edge node is unavailable")
             sys.exit(1)
         print('Generating infrastructure names and tags')
         if 'exploratory_name' in os.environ:
@@ -89,7 +89,7 @@ if __name__ == "__main__":
         tag = {"Key": data_engine['tag_name'],
                "Value": "{}-{}-{}-subnet".format(data_engine['service_base_name'], data_engine['project_name'],
                                                  data_engine['endpoint_name'])}
-        data_engine['subnet_cidr'] = dlab.meta_lib.get_subnet_by_tag(tag)
+        data_engine['subnet_cidr'] = datalab.meta_lib.get_subnet_by_tag(tag)
         data_engine['notebook_dataengine_role_profile_name'] = '{}-{}-{}-nb-de-profile' \
             .format(data_engine['service_base_name'], data_engine['project_name'], data_engine['endpoint_name'])
         data_engine['instance_count'] = int(os.environ['dataengine_instance_count'])
@@ -120,9 +120,9 @@ if __name__ == "__main__":
                     x != 'None' and x != '')
             else data_engine['expected_image_name'])(str(os.environ.get('notebook_image_name')))
         print('Searching pre-configured images')
-        data_engine['ami_id'] = dlab.meta_lib.get_ami_id(os.environ['aws_{}_image_name'.format(
+        data_engine['ami_id'] = datalab.meta_lib.get_ami_id(os.environ['aws_{}_image_name'.format(
             os.environ['conf_os_family'])])
-        image_id = dlab.meta_lib.get_ami_id_by_name(data_engine['notebook_image_name'], 'available')
+        image_id = datalab.meta_lib.get_ami_id_by_name(data_engine['notebook_image_name'], 'available')
         if image_id != '' and os.environ['application'] in os.environ['dataengine_image_notebooks'].split(','):
             data_engine['ami_id'] = image_id
             print('Pre-configured image found. Using: {}'.format(data_engine['ami_id']))
@@ -131,7 +131,7 @@ if __name__ == "__main__":
             print('No pre-configured image found. Using default one: {}'.format(data_engine['ami_id']))
 
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
 
     with open('/root/result.json', 'w') as f:
@@ -155,23 +155,23 @@ if __name__ == "__main__":
                  "--instance_class {}" \
             .format(data_engine['master_node_name'], data_engine['ami_id'], data_engine['master_size'],
                     data_engine['key_name'],
-                    dlab.meta_lib.get_security_group_by_name(data_engine['dataengine_master_security_group_name']),
-                    dlab.meta_lib.get_subnet_by_cidr(data_engine['subnet_cidr'], os.environ['aws_notebook_vpc_id']),
+                    datalab.meta_lib.get_security_group_by_name(data_engine['dataengine_master_security_group_name']),
+                    datalab.meta_lib.get_subnet_by_cidr(data_engine['subnet_cidr'], os.environ['aws_notebook_vpc_id']),
                     data_engine['notebook_dataengine_role_profile_name'], data_engine['tag_name'],
                     data_engine['master_node_name'], data_engine['primary_disk_size'], data_engine['instance_class'])
         try:
             local("~/scripts/{}.py {}".format('common_create_instance', params))
-            data_engine['master_id'] = dlab.meta_lib.get_instance_by_name(data_engine['tag_name'],
+            data_engine['master_id'] = datalab.meta_lib.get_instance_by_name(data_engine['tag_name'],
                                                                           data_engine['master_node_name'])
-            dlab.actions_lib.create_tag(data_engine['master_id'], data_engine['cluster_nodes_tag'], False)
-            dlab.actions_lib.create_tag(data_engine['master_id'], data_engine['cluster_nodes_resource_tag'], False)
-            dlab.actions_lib.create_tag(data_engine['master_id'], data_engine['cluster_nodes_billing_tag'], False)
-            dlab.actions_lib.create_tag(data_engine['master_id'], data_engine['cluster_nodes_tag_type'], False)
+            datalab.actions_lib.create_tag(data_engine['master_id'], data_engine['cluster_nodes_tag'], False)
+            datalab.actions_lib.create_tag(data_engine['master_id'], data_engine['cluster_nodes_resource_tag'], False)
+            datalab.actions_lib.create_tag(data_engine['master_id'], data_engine['cluster_nodes_billing_tag'], False)
+            datalab.actions_lib.create_tag(data_engine['master_id'], data_engine['cluster_nodes_tag_type'], False)
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to create master instance.", str(err))
+        datalab.fab.append_result("Failed to create master instance.", str(err))
         sys.exit(1)
 
     try:
@@ -185,27 +185,27 @@ if __name__ == "__main__":
                      "--primary_disk_size {} --instance_class {}" \
                 .format(slave_name, data_engine['ami_id'], data_engine['slave_size'],
                         data_engine['key_name'],
-                        dlab.meta_lib.get_security_group_by_name(data_engine['dataengine_slave_security_group_name']),
-                        dlab.meta_lib.get_subnet_by_cidr(data_engine['subnet_cidr'], os.environ['aws_notebook_vpc_id']),
+                        datalab.meta_lib.get_security_group_by_name(data_engine['dataengine_slave_security_group_name']),
+                        datalab.meta_lib.get_subnet_by_cidr(data_engine['subnet_cidr'], os.environ['aws_notebook_vpc_id']),
                         data_engine['notebook_dataengine_role_profile_name'], data_engine['tag_name'],
                         slave_name, data_engine['primary_disk_size'], data_engine['instance_class'])
             try:
                 local("~/scripts/{}.py {}".format('common_create_instance', params))
-                data_engine['slave_id'] = dlab.meta_lib.get_instance_by_name(data_engine['tag_name'], slave_name)
-                dlab.actions_lib.create_tag(data_engine['slave_id'], data_engine['cluster_nodes_tag'], False)
-                dlab.actions_lib.create_tag(data_engine['slave_id'], data_engine['cluster_nodes_resource_tag'], False)
-                dlab.actions_lib.create_tag(data_engine['slave_id'], data_engine['cluster_nodes_billing_tag'], False)
-                dlab.actions_lib.create_tag(data_engine['slave_id'], data_engine['cluster_nodes_tag_type'], False)
+                data_engine['slave_id'] = datalab.meta_lib.get_instance_by_name(data_engine['tag_name'], slave_name)
+                datalab.actions_lib.create_tag(data_engine['slave_id'], data_engine['cluster_nodes_tag'], False)
+                datalab.actions_lib.create_tag(data_engine['slave_id'], data_engine['cluster_nodes_resource_tag'], False)
+                datalab.actions_lib.create_tag(data_engine['slave_id'], data_engine['cluster_nodes_billing_tag'], False)
+                datalab.actions_lib.create_tag(data_engine['slave_id'], data_engine['cluster_nodes_tag_type'], False)
             except:
                 traceback.print_exc()
                 raise Exception
     except Exception as err:
-        dlab.actions_lib.remove_ec2(data_engine['tag_name'], data_engine['master_node_name'])
+        datalab.actions_lib.remove_ec2(data_engine['tag_name'], data_engine['master_node_name'])
         for i in range(data_engine['instance_count'] - 1):
             slave_name = data_engine['slave_node_name'] + '{}'.format(i+1)
             try:
-                dlab.actions_lib.remove_ec2(data_engine['tag_name'], slave_name)
+                datalab.actions_lib.remove_ec2(data_engine['tag_name'], slave_name)
             except:
                 print("The slave instance {} hasn't been created.".format(slave_name))
-        dlab.fab.append_result("Failed to create slave instances.", str(err))
+        datalab.fab.append_result("Failed to create slave instances.", str(err))
         sys.exit(1)
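
dataengine_prepare.py stamps master and slaves with several cluster-level tags right after creation; the create_tag helper lives in datalab.actions_lib. An equivalent raw boto3 call, with placeholder instance id and tag values:

    import boto3

    ec2 = boto3.client('ec2')
    ec2.create_tags(
        Resources=['i-0123456789abcdef0'],   # instance id from the create step (placeholder)
        Tags=[
            {'Key': 'dataengine_notebook_name', 'Value': 'sbn-prj-nb'},   # placeholders
            {'Key': 'Type', 'Value': 'dataengine'},
        ],
    )
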
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
index 0450ff7..e3150b8 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
@@ -24,9 +24,9 @@
 import logging
 import json
 import os
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import sys
 import traceback
 from fabric.api import *
@@ -35,7 +35,7 @@ from fabric.api import *
 def start_data_engine(cluster_name):
     print("Start Data Engine")
     try:
-        dlab.actions_lib.start_ec2(os.environ['conf_tag_resource_id'], cluster_name)
+        datalab.actions_lib.start_ec2(os.environ['conf_tag_resource_id'], cluster_name)
     except:
         sys.exit(1)
 
@@ -51,7 +51,7 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
 
     # generating variables dictionary
-    dlab.actions_lib.create_aws_config_files()
+    datalab.actions_lib.create_aws_config_files()
     print('Generating infrastructure names and tags')
     data_engine = dict()
     
@@ -79,7 +79,7 @@ if __name__ == "__main__":
                                          data_engine['cluster_name']))
     except Exception as err:
         print('Error: {0}'.format(err))
-        dlab.fab.append_result("Failed to start Data Engine.", str(err))
+        datalab.fab.append_result("Failed to start Data Engine.", str(err))
         sys.exit(1)
 
     try:
@@ -87,9 +87,9 @@ if __name__ == "__main__":
         print('[UPDATE LAST ACTIVITY TIME]')
         data_engine['computational_id'] = data_engine['cluster_name'] + '-m'
         data_engine['tag_name'] = data_engine['service_base_name'] + '-tag'
-        data_engine['notebook_ip'] = dlab.meta_lib.get_instance_ip_address(
+        data_engine['notebook_ip'] = datalab.meta_lib.get_instance_ip_address(
             data_engine['tag_name'], os.environ['notebook_instance_name']).get('Private')
-        data_engine['computational_ip'] = dlab.meta_lib.get_instance_ip_address(
+        data_engine['computational_ip'] = datalab.meta_lib.get_instance_ip_address(
             data_engine['tag_name'], data_engine['computational_id']).get('Private')
         data_engine['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         params = '--os_user {0} --notebook_ip {1} --keyfile "{2}" --cluster_ip {3}' \
@@ -99,7 +99,7 @@ if __name__ == "__main__":
             local("~/scripts/{}.py {}".format('update_inactivity_on_start', params))
         except Exception as err:
             traceback.print_exc()
-            dlab.fab.append_result("Failed to update last activity time.", str(err))
+            datalab.fab.append_result("Failed to update last activity time.", str(err))
             raise Exception
     except:
         sys.exit(1)
@@ -111,5 +111,5 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         sys.exit(1)
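
dataengine_start.py starts the whole cluster through a single tag lookup: conf_tag_resource_id as the key, the cluster name as the value. A boto3 sketch of that start-by-tag behaviour (the real implementation is datalab.actions_lib.start_ec2):

    import boto3

    ec2 = boto3.client('ec2')

    def start_cluster(tag_key, cluster_name):
        reservations = ec2.describe_instances(Filters=[
            {'Name': 'tag:{}'.format(tag_key), 'Values': [cluster_name]},
            {'Name': 'instance-state-name', 'Values': ['stopped']},
        ])['Reservations']
        ids = [i['InstanceId'] for r in reservations for i in r['Instances']]
        if ids:
            ec2.start_instances(InstanceIds=ids)
            ec2.get_waiter('instance_running').wait(InstanceIds=ids)
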
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_stop.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_stop.py
index d31d395..f0f60f4 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_stop.py
@@ -24,15 +24,15 @@
 import logging
 import json
 import os
-import dlab.fab
-import dlab.actions_lib
+import datalab.fab
+import datalab.actions_lib
 import sys
 
 
 def stop_data_engine(cluster_name):
     print("Stop Data Engine")
     try:
-        dlab.actions_lib.stop_ec2(os.environ['conf_tag_resource_id'], cluster_name)
+        datalab.actions_lib.stop_ec2(os.environ['conf_tag_resource_id'], cluster_name)
     except:
         sys.exit(1)
 
@@ -48,7 +48,7 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
 
     # generating variables dictionary
-    dlab.actions_lib.create_aws_config_files()
+    datalab.actions_lib.create_aws_config_files()
     print('Generating infrastructure names and tags')
     data_engine_config = dict()
     try:
@@ -74,7 +74,7 @@ if __name__ == "__main__":
                                         data_engine_config['cluster_name']))
     except Exception as err:
         print('Error: {0}'.format(err))
-        dlab.fab.append_result("Failed to stop Data Engine.", str(err))
+        datalab.fab.append_result("Failed to stop Data Engine.", str(err))
         sys.exit(1)
 
     try:
@@ -84,5 +84,5 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         sys.exit(1)
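
dataengine_stop.py is the mirror image of the start script: presumably stop_ec2 applies the same tag filter, but targets running nodes and waits on the stopped state. Sketched under the same assumptions as the start example above:

    import boto3

    ec2 = boto3.client('ec2')

    def stop_cluster(tag_key, cluster_name):
        reservations = ec2.describe_instances(Filters=[
            {'Name': 'tag:{}'.format(tag_key), 'Values': [cluster_name]},
            {'Name': 'instance-state-name', 'Values': ['running']},
        ])['Reservations']
        ids = [i['InstanceId'] for r in reservations for i in r['Instances']]
        if ids:
            ec2.stop_instances(InstanceIds=ids)
            ec2.get_waiter('instance_stopped').wait(InstanceIds=ids)
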
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_terminate.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_terminate.py
index 7d8c10d..dc3a4d2 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_terminate.py
@@ -24,8 +24,8 @@
 import logging
 import json
 import sys
-import dlab.fab
-import dlab.actions_lib
+import datalab.fab
+import datalab.actions_lib
 import traceback
 import os
 
@@ -35,13 +35,13 @@ def terminate_data_engine(tag_name, notebook_name,
                           cluster_name, remote_kernel_name):
     print("Terminating data engine cluster")
     try:
-        dlab.actions_lib.remove_ec2(os.environ['conf_tag_resource_id'], cluster_name)
+        datalab.actions_lib.remove_ec2(os.environ['conf_tag_resource_id'], cluster_name)
     except:
         sys.exit(1)
 
     print("Removing Data Engine kernels from notebook")
     try:
-        dlab.actions_lib.remove_dataengine_kernels(tag_name, notebook_name,
+        datalab.actions_lib.remove_dataengine_kernels(tag_name, notebook_name,
                                                    os_user, key_path, remote_kernel_name)
     except:
         sys.exit(1)
@@ -58,7 +58,7 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
     # generating variables dictionary
     print('Generating infrastructure names and tags')
-    dlab.actions_lib.create_aws_config_files()
+    datalab.actions_lib.create_aws_config_files()
     data_engine = dict()
     
     try:
@@ -92,7 +92,7 @@ if __name__ == "__main__":
                     data_engine['cluster_name']), data_engine['cluster_name'])
         except Exception as err:
             traceback.print_exc()
-            dlab.fab.append_result("Failed to terminate Data Engine.", str(err))
+            datalab.fab.append_result("Failed to terminate Data Engine.", str(err))
             raise Exception
     except:
         sys.exit(1)
@@ -104,5 +104,5 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         sys.exit(1)
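
dataengine_terminate.py removes the EC2 nodes first and then the matching remote kernels from the notebook. Termination by tag, sketched with the boto3 resource API (tag key and value stand in for conf_tag_resource_id and the cluster name):

    import boto3

    ec2 = boto3.resource('ec2')
    doomed = ec2.instances.filter(Filters=[
        {'Name': 'tag:datalab_resource_id', 'Values': ['sbn-prj-de-1']},   # placeholders
    ])
    for instance in doomed:
        instance.terminate()
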
diff --git a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
index a2ca856..1a64ae5 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
@@ -25,9 +25,9 @@ import logging
 import json
 import sys
 import traceback
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import os
 import argparse
 from fabric.api import *
@@ -81,25 +81,25 @@ if __name__ == "__main__":
                                                                          notebook_config['project_name'],
                                                                          notebook_config['endpoint_name'])
         notebook_config['tag_name'] = '{}-tag'.format(notebook_config['service_base_name'])
-        notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
-        notebook_config['ip_address'] = dlab.meta_lib.get_instance_ip_address(
+        notebook_config['datalab_ssh_user'] = os.environ['conf_os_user']
+        notebook_config['ip_address'] = datalab.meta_lib.get_instance_ip_address(
             notebook_config['tag_name'], notebook_config['instance_name']).get('Private')
 
         # generating variables regarding EDGE proxy on Notebook instance
-        instance_hostname = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
+        instance_hostname = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
                                                                 notebook_config['instance_name'])
         edge_instance_name = '{}-{}-{}-edge'.format(notebook_config['service_base_name'],
                                                     notebook_config['project_name'], notebook_config['endpoint_name'])
-        edge_instance_hostname = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'], edge_instance_name)
-        edge_instance_private_ip = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
+        edge_instance_hostname = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'], edge_instance_name)
+        edge_instance_private_ip = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
                                                                          edge_instance_name).get('Private')
-        notebook_config['edge_instance_hostname'] = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
+        notebook_config['edge_instance_hostname'] = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
                                                                                         edge_instance_name)
         keyfile_name = "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
-        edge_ip = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'], edge_instance_name).get('Private')
+        edge_ip = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'], edge_instance_name).get('Private')
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -110,11 +110,11 @@ if __name__ == "__main__":
             notebook_config['initial_user'] = 'ec2-user'
             notebook_config['sudo_group'] = 'wheel'
 
-        logging.info('[CREATING DLAB SSH USER]')
-        print('[CREATING DLAB SSH USER]')
+        logging.info('[CREATING DATA LAB SSH USER]')
+        print('[CREATING DATA LAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
-            notebook_config['initial_user'], notebook_config['dlab_ssh_user'], notebook_config['sudo_group'])
+            notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
             local("~/scripts/{}.py {}".format('create_ssh_user', params))
@@ -122,8 +122,8 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed creating ssh user 'dlab'.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed creating ssh user 'datalab'.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     # configuring proxy on Notebook instance
@@ -133,15 +133,15 @@ if __name__ == "__main__":
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
-                    notebook_config['dlab_ssh_user'])
+                    notebook_config['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('common_configure_proxy', params))
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure proxy.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to configure proxy.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -150,15 +150,15 @@ if __name__ == "__main__":
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
-            instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['dlab_ssh_user'])
+            instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('install_user_key', params))
         except:
-            dlab.fab.append_result("Failed installing users key")
+            datalab.fab.append_result("Failed installing users key")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing users key.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed installing users key.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     # updating repositories & installing python packages
@@ -166,7 +166,7 @@ if __name__ == "__main__":
         logging.info('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
         print('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}".format(
-            instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'], os.environ['aws_region'],
+            instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
             edge_instance_private_ip)
         try:
             local("~/scripts/{}.py {}".format('install_prerequisites', params))
@@ -174,8 +174,8 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing apps: apt & pip.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed installing apps: apt & pip.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -186,7 +186,7 @@ if __name__ == "__main__":
                  "--scala_version {4} --spark_version {5} " \
                  "--hadoop_version {6} --region {7} " \
                  "--r_mirror {8} --ip_address {9} --exploratory_name {10} --edge_ip {11}" \
-                 .format(instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'],
+                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                          os.environ['notebook_jupyter_version'], os.environ['notebook_scala_version'],
                          os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
                          os.environ['aws_region'], os.environ['notebook_r_mirror'],
@@ -197,23 +197,23 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure Deep Learning node.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to configure Deep Learning node.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
         print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
-            .format(notebook_config['dlab_ssh_user'], instance_hostname, keyfile_name)
+            .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
             local("~/scripts/{}.py {}".format('manage_git_creds', params))
         except:
-            dlab.fab.append_result("Failed setup git credentials")
+            datalab.fab.append_result("Failed setup git credentials")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to setup git credentials.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to setup git credentials.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -221,7 +221,7 @@ if __name__ == "__main__":
         print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None']:
             params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
-                .format(instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'],
+                .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
                 local("~/scripts/{}.py {}".format('common_remove_remote_kernels', params))
@@ -229,8 +229,8 @@ if __name__ == "__main__":
                 traceback.print_exc()
                 raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to post configuring instance.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to post configuring instance.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -248,25 +248,25 @@ if __name__ == "__main__":
                  "--additional_info '{}'"\
             .format(edge_instance_hostname,
                     keyfile_name,
-                    notebook_config['dlab_ssh_user'],
+                    notebook_config['datalab_ssh_user'],
                     'jupyter',
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
             local("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params))
         except:
-            dlab.fab.append_result("Failed edge reverse proxy template")
+            datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
     except Exception as err:
         print('Error: {0}'.format(err))
-        dlab.fab.append_result("Failed edge reverse proxy template.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed edge reverse proxy template.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     if notebook_config['image_enabled'] == 'true':
         try:
             print('[CREATING AMI]')
-            ami_id = dlab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
+            ami_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
             if ami_id == '' and notebook_config['shared_image_enabled'] == 'false':
                print("Looks like it's the first time this notebook server is being configured. Creating an image.")
                 try:
@@ -276,7 +276,7 @@ if __name__ == "__main__":
                 except KeyError:
                     os.environ['conf_additional_tags'] = 'project_tag:{0};endpoint_tag:{1}'.format(
                         notebook_config['project_name'], notebook_config['endpoint_name'])
-                image_id = dlab.actions_lib.create_image_from_instance(
+                image_id = datalab.actions_lib.create_image_from_instance(
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
@@ -288,20 +288,20 @@ if __name__ == "__main__":
                 except KeyError:
                     os.environ['conf_additional_tags'] = 'ami:shared;endpoint_tag:{}'.format(
                         notebook_config['endpoint_name'])
-                image_id = dlab.actions_lib.create_image_from_instance(
+                image_id = datalab.actions_lib.create_image_from_instance(
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
                    print("Image was successfully created. Its ID is {}".format(image_id))
         except Exception as err:
-            dlab.fab.append_result("Failed creating image.", str(err))
-            dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+            datalab.fab.append_result("Failed creating image.", str(err))
+            datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
             sys.exit(1)
     try:
         # generating output information
-        ip_address = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
+        ip_address = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
                                                            notebook_config['instance_name']).get('Private')
-        dns_name = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
+        dns_name = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
         tensor_board_url = 'http://' + ip_address + ':6006'
         jupyter_url = 'http://' + ip_address + ':8888/{}/'.format(notebook_config['exploratory_name'])
         jupyter_notebook_access_url = "https://{}/{}/".format(notebook_config['edge_instance_hostname'],
@@ -316,7 +316,7 @@ if __name__ == "__main__":
         print("Instance name: {}".format(notebook_config['instance_name']))
         print("Private DNS: {}".format(dns_name))
         print("Private IP: {}".format(ip_address))
-        print("Instance ID: {}".format(dlab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                           notebook_config['instance_name'])))
         print("Instance type: {}".format(notebook_config['instance_type']))
         print("Key name: {}".format(notebook_config['key_name']))
@@ -327,14 +327,14 @@ if __name__ == "__main__":
 
         print("Ungit URL: {}".format(ungit_ip_url))
         print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
-              format(notebook_config['key_name'],notebook_config['dlab_ssh_user'], ip_address))
+              format(notebook_config['key_name'],notebook_config['datalab_ssh_user'], ip_address))
         print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
-              format(notebook_config['key_name'], notebook_config['dlab_ssh_user'], dns_name))
+              format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], dns_name))
 
         with open("/root/result.json", 'w') as result:
             res = {"hostname": dns_name,
                    "ip": ip_address,
-                   "instance_id": dlab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+                   "instance_id": datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                      notebook_config['instance_name']),
                    "master_keyname": os.environ['conf_key_name'],
                    "notebook_name": notebook_config['instance_name'],
@@ -356,6 +356,6 @@ if __name__ == "__main__":
                    ]}
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Error with writing results.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
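
The hunks above migrate every dlab.* import and helper call to the renamed datalab package in a single pass. Until all callers (including user-maintained scripts) are migrated, a transitional alias could keep legacy imports working. The sketch below is not part of this commit; it is a minimal, hypothetical shim that assumes the new datalab package, with its fab, actions_lib and meta_lib submodules, is already installed:

    # datalab_compat.py - hypothetical transitional shim, not from this commit.
    # Pre-registers the renamed modules under their legacy names so that
    # "import dlab.fab" (and "from dlab.fab import *") still resolve.
    import importlib
    import sys

    sys.modules['dlab'] = importlib.import_module('datalab')
    for _name in ('fab', 'actions_lib', 'meta_lib'):
        sys.modules['dlab.' + _name] = importlib.import_module('datalab.' + _name)

Importing such a module once at interpreter start-up would let stragglers keep running while the rename propagates.
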
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_associate_elastic_ip.py b/infrastructure-provisioning/src/general/scripts/aws/edge_associate_elastic_ip.py
index 131ac46..2eaba02 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_associate_elastic_ip.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_associate_elastic_ip.py
@@ -22,9 +22,9 @@
 # ******************************************************************************
 
 import sys
-from dlab.fab import *
-from dlab.meta_lib import *
-from dlab.actions_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
+from datalab.actions_lib import *
 import os
 import argparse
 
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py b/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
index bafd031..08e1775 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
@@ -25,9 +25,9 @@ import json
 import sys
 import time
 import os
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import logging
 import traceback
 import uuid
@@ -43,18 +43,18 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
 
     def clear_resources():
-        dlab.actions_lib.remove_all_iam_resources('notebook', edge_conf['project_name'])
-        dlab.actions_lib.remove_all_iam_resources('edge', edge_conf['project_name'])
-        dlab.actions_lib.remove_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
-        dlab.actions_lib.remove_sgroups(edge_conf['dataengine_instances_name'])
-        dlab.actions_lib.remove_sgroups(edge_conf['notebook_instance_name'])
-        dlab.actions_lib.remove_sgroups(edge_conf['instance_name'])
-        dlab.actions_lib.remove_s3('edge', edge_conf['project_name'])
+        datalab.actions_lib.remove_all_iam_resources('notebook', edge_conf['project_name'])
+        datalab.actions_lib.remove_all_iam_resources('edge', edge_conf['project_name'])
+        datalab.actions_lib.remove_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
+        datalab.actions_lib.remove_sgroups(edge_conf['dataengine_instances_name'])
+        datalab.actions_lib.remove_sgroups(edge_conf['notebook_instance_name'])
+        datalab.actions_lib.remove_sgroups(edge_conf['instance_name'])
+        datalab.actions_lib.remove_s3('edge', edge_conf['project_name'])
 
     try:
         print('Generating infrastructure names and tags')
         edge_conf = dict()
-        edge_conf['service_base_name'] = os.environ['conf_service_base_name'] = dlab.fab.replace_multi_symbols(
+        edge_conf['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
             os.environ['conf_service_base_name'][:20], '-', True)
         edge_conf['key_name'] = os.environ['conf_key_name']
         edge_conf['user_key'] = os.environ['key']
@@ -87,21 +87,21 @@ if __name__ == "__main__":
         tag = {"Key": edge_conf['tag_name'],
                "Value": "{}-{}-{}-subnet".format(edge_conf['service_base_name'], edge_conf['project_name'],
                                                  edge_conf['endpoint_name'])}
-        edge_conf['private_subnet_cidr'] = dlab.meta_lib.get_subnet_by_tag(tag)
-        edge_conf['dlab_ssh_user'] = os.environ['conf_os_user']
+        edge_conf['private_subnet_cidr'] = datalab.meta_lib.get_subnet_by_tag(tag)
+        edge_conf['datalab_ssh_user'] = os.environ['conf_os_user']
         edge_conf['network_type'] = os.environ['conf_network_type']
         if edge_conf['network_type'] == 'public':
-            edge_conf['edge_public_ip'] = dlab.meta_lib.get_instance_ip_address(edge_conf['tag_name'],
+            edge_conf['edge_public_ip'] = datalab.meta_lib.get_instance_ip_address(edge_conf['tag_name'],
                                                                   edge_conf['instance_name']).get('Public')
-            edge_conf['edge_private_ip'] = dlab.meta_lib.get_instance_ip_address(
+            edge_conf['edge_private_ip'] = datalab.meta_lib.get_instance_ip_address(
                 edge_conf['tag_name'], edge_conf['instance_name']).get('Private')
         elif edge_conf['network_type'] == 'private':
-            edge_conf['edge_private_ip'] = dlab.meta_lib.get_instance_ip_address(
+            edge_conf['edge_private_ip'] = datalab.meta_lib.get_instance_ip_address(
                 edge_conf['tag_name'], edge_conf['instance_name']).get('Private')
             edge_conf['edge_public_ip'] = edge_conf['edge_private_ip']
-        edge_conf['vpc1_cidrs'] = dlab.meta_lib.get_vpc_cidr_by_id(os.environ['aws_vpc_id'])
+        edge_conf['vpc1_cidrs'] = datalab.meta_lib.get_vpc_cidr_by_id(os.environ['aws_vpc_id'])
         try:
-            edge_conf['vpc2_cidrs'] = dlab.meta_lib.get_vpc_cidr_by_id(os.environ['aws_notebook_vpc_id'])
+            edge_conf['vpc2_cidrs'] = datalab.meta_lib.get_vpc_cidr_by_id(os.environ['aws_notebook_vpc_id'])
             edge_conf['vpc_cidrs'] = list(set(edge_conf['vpc1_cidrs'] + edge_conf['vpc2_cidrs']))
         except KeyError:
             edge_conf['vpc_cidrs'] = list(set(edge_conf['vpc1_cidrs']))
@@ -110,7 +110,7 @@ if __name__ == "__main__":
         for cidr in os.environ['conf_allowed_ip_cidr'].split(','):
             edge_conf['allowed_ip_cidr'].append(cidr.replace(' ', ''))
 
-        edge_conf['instance_hostname'] = dlab.meta_lib.get_instance_hostname(edge_conf['tag_name'],
+        edge_conf['instance_hostname'] = datalab.meta_lib.get_instance_hostname(edge_conf['tag_name'],
                                                                              edge_conf['instance_name'])
         edge_conf['keyfile_name'] = "{}{}.pem".format(os.environ['conf_key_dir'], edge_conf['key_name'])
 
@@ -118,7 +118,7 @@ if __name__ == "__main__":
             edge_conf['step_cert_sans'] = ' --san {0} '.format(edge_conf['edge_private_ip'])
             if edge_conf['network_type'] == 'public':
                 edge_conf['step_cert_sans'] += ' --san {0} --san {1}'.format(
-                    dlab.meta_lib.get_instance_hostname(edge_conf['tag_name'], edge_conf['instance_name']),
+                    datalab.meta_lib.get_instance_hostname(edge_conf['tag_name'], edge_conf['instance_name']),
                     edge_conf['edge_public_ip'])
         else:
             edge_conf['step_cert_sans'] = ''
@@ -129,16 +129,16 @@ if __name__ == "__main__":
             edge_conf['initial_user'] = 'ec2-user'
             edge_conf['sudo_group'] = 'wheel'
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         clear_resources()
         sys.exit(1)
 
     try:
-        logging.info('[CREATING DLAB SSH USER]')
-        print('[CREATING DLAB SSH USER]')
+        logging.info('[CREATING DATA LAB SSH USER]')
+        print('[CREATING DATA LAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             edge_conf['instance_hostname'], os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
-            edge_conf['initial_user'], edge_conf['dlab_ssh_user'], edge_conf['sudo_group'])
+            edge_conf['initial_user'], edge_conf['datalab_ssh_user'], edge_conf['sudo_group'])
 
         try:
             local("~/scripts/{}.py {}".format('create_ssh_user', params))
@@ -146,7 +146,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed creating ssh user 'dlab'.", str(err))
+        datalab.fab.append_result("Failed creating ssh user 'datalab'.", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -154,7 +154,7 @@ if __name__ == "__main__":
         print('[INSTALLING PREREQUISITES]')
         logging.info('[INSTALLING PREREQUISITES]')
         params = "--hostname {} --keyfile {} --user {} --region {}".\
-            format(edge_conf['instance_hostname'], edge_conf['keyfile_name'], edge_conf['dlab_ssh_user'],
+            format(edge_conf['instance_hostname'], edge_conf['keyfile_name'], edge_conf['datalab_ssh_user'],
                    os.environ['aws_region'])
         try:
             local("~/scripts/{}.py {}".format('install_prerequisites', params))
@@ -164,20 +164,20 @@ if __name__ == "__main__":
         print('RESTARTING EDGE NODE')
         try:
             print('Stopping EDGE node')
-            dlab.actions_lib.stop_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
+            datalab.actions_lib.stop_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
         except Exception as err:
             print('Error: {0}'.format(err))
-            dlab.fab.append_result("Failed to stop edge.", str(err))
+            datalab.fab.append_result("Failed to stop edge.", str(err))
             sys.exit(1)
         try:
             print('Starting EDGE node')
-            dlab.actions_lib.start_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
+            datalab.actions_lib.start_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
         except Exception as err:
             print('Error: {0}'.format(err))
-            dlab.fab.append_result("Failed to start edge.", str(err))
+            datalab.fab.append_result("Failed to start edge.", str(err))
             sys.exit(1)
     except Exception as err:
-        dlab.fab.append_result("Failed installing apps: apt & pip.", str(err))
+        datalab.fab.append_result("Failed installing apps: apt & pip.", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -195,14 +195,14 @@ if __name__ == "__main__":
                              "allowed_ip_cidr": edge_conf['allowed_ip_cidr']}
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             edge_conf['instance_hostname'], edge_conf['keyfile_name'], json.dumps(additional_config),
-            edge_conf['dlab_ssh_user'])
+            edge_conf['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('configure_http_proxy', params))
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing http proxy.", str(err))
+        datalab.fab.append_result("Failed installing http proxy.", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -215,14 +215,14 @@ if __name__ == "__main__":
                              "user_key": edge_conf['user_key']}
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             edge_conf['instance_hostname'], edge_conf['keyfile_name'], json.dumps(additional_config),
-            edge_conf['dlab_ssh_user'])
+            edge_conf['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('install_user_key', params))
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing users key." + str(err))
+        datalab.fab.append_result("Failed installing users key." + str(err))
         clear_resources()
         sys.exit(1)
 
@@ -232,7 +232,7 @@ if __name__ == "__main__":
         edge_conf['keycloak_client_secret'] = str(uuid.uuid4())
         params = "--hostname {} --keyfile {} --user {} --keycloak_client_id {} --keycloak_client_secret {} " \
                  "--step_cert_sans '{}' ".format(
-                  edge_conf['instance_hostname'], edge_conf['keyfile_name'], edge_conf['dlab_ssh_user'],
+                  edge_conf['instance_hostname'], edge_conf['keyfile_name'], edge_conf['datalab_ssh_user'],
                   '{}-{}-{}'.format(edge_conf['service_base_name'], edge_conf['project_name'],
                                     edge_conf['endpoint_name']),
                   edge_conf['keycloak_client_secret'], edge_conf['step_cert_sans'])
@@ -259,7 +259,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing nginx reverse proxy." + str(err))
+        datalab.fab.append_result("Failed installing nginx reverse proxy." + str(err))
         clear_resources()
         sys.exit(1)
 
@@ -271,14 +271,14 @@ if __name__ == "__main__":
                                  "edge_ip": edge_conf['edge_private_ip']}
             params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
                 edge_conf['instance_hostname'], edge_conf['keyfile_name'], json.dumps(additional_config),
-                edge_conf['dlab_ssh_user'])
+                edge_conf['datalab_ssh_user'])
             try:
                 local("~/scripts/{}.py {}".format('configure_nftables', params))
             except:
                 traceback.print_exc()
                 raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure NAT." + str(err))
+        datalab.fab.append_result("Failed to configure NAT." + str(err))
         clear_resources()
         sys.exit(1)
 
@@ -289,7 +289,7 @@ if __name__ == "__main__":
         print("Hostname: {}".format(edge_conf['instance_hostname']))
         print("Public IP: {}".format(edge_conf['edge_public_ip']))
         print("Private IP: {}".format(edge_conf['edge_private_ip']))
-        print("Instance ID: {}".format(dlab.meta_lib.get_instance_by_name(edge_conf['tag_name'],
+        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(edge_conf['tag_name'],
                                                                           edge_conf['instance_name'])))
         print("Key name: {}".format(edge_conf['key_name']))
         print("Bucket name: {}".format(edge_conf['bucket_name']))
@@ -302,7 +302,7 @@ if __name__ == "__main__":
             res = {"hostname": edge_conf['instance_hostname'],
                    "public_ip": edge_conf['edge_public_ip'],
                    "ip": edge_conf['edge_private_ip'],
-                   "instance_id": dlab.meta_lib.get_instance_by_name(edge_conf['tag_name'], edge_conf['instance_name']),
+                   "instance_id": datalab.meta_lib.get_instance_by_name(edge_conf['tag_name'], edge_conf['instance_name']),
                    "key_name": edge_conf['key_name'],
                    "user_own_bicket_name": edge_conf['bucket_name'],
                    "shared_bucket_name": edge_conf['shared_bucket_name'],
@@ -314,12 +314,12 @@ if __name__ == "__main__":
                    "notebook_subnet": edge_conf['private_subnet_cidr'],
                    "full_edge_conf": edge_conf,
                    "project_name": edge_conf['project_name'],
-                   "@class": "com.epam.dlab.dto.aws.edge.EdgeInfoAws",
+                   "@class": "com.epam.datalab.dto.aws.edge.EdgeInfoAws",
                    "Action": "Create new EDGE server"}
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results.", str(err))
+        datalab.fab.append_result("Error with writing results.", str(err))
         clear_resources()
         sys.exit(1)
 
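Every file in this series applies the same textual mapping the commit message describes (dlab -> datalab, DLAB -> DATA_LAB, DLab -> Data Lab). A sweep of that kind is easy to reproduce mechanically; the following rough Python 2 style sketch applies the mapping to file contents only (the commit additionally renames files) and is illustrative, not the tool actually used:

    # rename_sweep.py - illustrative sketch only, not the script used here.
    # Rewrites the three case-distinct tokens; none of them is a substring
    # of another token's replacement, so the substitutions do not interact.
    import os

    MAPPING = [('DLab', 'Data Lab'), ('DLAB', 'DATA_LAB'), ('dlab', 'datalab')]

    for root, _dirs, files in os.walk('infrastructure-provisioning'):
        for fname in files:
            path = os.path.join(root, fname)
            with open(path) as fh:
                text = fh.read()
            if '\0' in text:
                continue  # skip binaries such as image assets
            new_text = text
            for old, new in MAPPING:
                new_text = new_text.replace(old, new)
            if new_text != text:
                with open(path, 'w') as fh:
                    fh.write(new_text)
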
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_configure_route_table.py b/infrastructure-provisioning/src/general/scripts/aws/edge_configure_route_table.py
index c3ac2e4..20e8eed 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_configure_route_table.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_configure_route_table.py
@@ -22,8 +22,8 @@
 # ******************************************************************************
 
 import argparse
-from dlab.actions_lib import *
-from dlab.meta_lib import *
+from datalab.actions_lib import *
+from datalab.meta_lib import *
 
 
 parser = argparse.ArgumentParser()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_start.py b/infrastructure-provisioning/src/general/scripts/aws/edge_start.py
index a9f856a..2bdcc10 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_start.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_start.py
@@ -21,9 +21,9 @@
 #
 # ******************************************************************************
 
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import sys
 import logging
 import os
@@ -40,7 +40,7 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
 
     # generating variables dictionary
-    dlab.actions_lib.create_aws_config_files()
+    datalab.actions_lib.create_aws_config_files()
     print('Generating infrastructure names and tags')
     edge_conf = dict()
     edge_conf['service_base_name'] = (os.environ['conf_service_base_name'])
@@ -53,15 +53,15 @@ if __name__ == "__main__":
     logging.info('[START EDGE]')
     print('[START EDGE]')
     try:
-        dlab.actions_lib.start_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
+        datalab.actions_lib.start_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
     except Exception as err:
         print('Error: {0}'.format(err))
-        dlab.fab.append_result("Failed to start edge.", str(err))
+        datalab.fab.append_result("Failed to start edge.", str(err))
         sys.exit(1)
 
     try:
-        instance_hostname = dlab.meta_lib.get_instance_hostname(edge_conf['tag_name'], edge_conf['instance_name'])
-        addresses = dlab.meta_lib.get_instance_ip_address(edge_conf['tag_name'], edge_conf['instance_name'])
+        instance_hostname = datalab.meta_lib.get_instance_hostname(edge_conf['tag_name'], edge_conf['instance_name'])
+        addresses = datalab.meta_lib.get_instance_ip_address(edge_conf['tag_name'], edge_conf['instance_name'])
         ip_address = addresses.get('Private')
         public_ip_address = addresses.get('Public')
         print('[SUMMARY]')
@@ -79,5 +79,5 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_status.py b/infrastructure-provisioning/src/general/scripts/aws/edge_status.py
index d8bd92e..53de3aa 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_status.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_status.py
@@ -23,9 +23,9 @@
 
 
 import json
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import sys
 import time
 import os
@@ -42,8 +42,8 @@ if __name__ == "__main__":
                         level=logging.DEBUG,
                         filename=local_log_filepath)
 
-    dlab.actions_lib.create_aws_config_files()
-    print('Getting statuses of DLAB resources')
+    datalab.actions_lib.create_aws_config_files()
+    print('Getting statuses of Data Lab resources')
 
     try:
         logging.info('[COLLECT DATA]')
@@ -55,5 +55,5 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to collect necessary information.", str(err))
+        datalab.fab.append_result("Failed to collect necessary information.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_stop.py b/infrastructure-provisioning/src/general/scripts/aws/edge_stop.py
index 3948781..6d8447b 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_stop.py
@@ -21,9 +21,9 @@
 #
 # ******************************************************************************
 
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import sys
 import os
 import logging
@@ -39,7 +39,7 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
 
     # generating variables dictionary
-    dlab.actions_lib.create_aws_config_files()
+    datalab.actions_lib.create_aws_config_files()
     print('Generating infrastructure names and tags')
     edge_conf = dict()
     edge_conf['service_base_name'] = (os.environ['conf_service_base_name'])
@@ -52,9 +52,9 @@ if __name__ == "__main__":
     logging.info('[STOP EDGE]')
     print('[STOP EDGE]')
     try:
-        dlab.actions_lib.stop_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
+        datalab.actions_lib.stop_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
     except Exception as err:
-        dlab.fab.append_result("Failed to stop edge.", str(err))
+        datalab.fab.append_result("Failed to stop edge.", str(err))
         sys.exit(1)
 
     try:
@@ -64,5 +64,5 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         sys.exit(1)
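
edge_configure.py restarts the edge node with back-to-back stop_ec2 and start_ec2 calls, and edge_start.py / edge_stop.py above repeat the same call-plus-append_result pattern. That sequence could be factored into one helper; the sketch below is hypothetical (it assumes only the datalab.actions_lib and datalab.fab signatures already used in these hunks), not code from this commit:

    # hypothetical refactoring sketch, not part of this commit
    import sys
    import datalab.actions_lib
    import datalab.fab

    def restart_ec2(tag_name, instance_name):
        # Stop, then start the tagged instance, aborting on the first failure.
        for action, verb in ((datalab.actions_lib.stop_ec2, 'stop'),
                             (datalab.actions_lib.start_ec2, 'start')):
            try:
                action(tag_name, instance_name)
            except Exception as err:
                datalab.fab.append_result("Failed to {} edge.".format(verb), str(err))
                sys.exit(1)
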
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
index cc53b22..47962ac 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
@@ -25,9 +25,9 @@ import logging
 import json
 import sys
 import traceback
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import os
 import argparse
 from fabric.api import *
@@ -81,25 +81,25 @@ if __name__ == "__main__":
                                                                          notebook_config['project_name'],
                                                                          notebook_config['endpoint_name'])
         notebook_config['tag_name'] = '{}-tag'.format(notebook_config['service_base_name'])
-        notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
-        notebook_config['ip_address'] = dlab.meta_lib.get_instance_ip_address(
+        notebook_config['datalab_ssh_user'] = os.environ['conf_os_user']
+        notebook_config['ip_address'] = datalab.meta_lib.get_instance_ip_address(
             notebook_config['tag_name'], notebook_config['instance_name']).get('Private')
 
         # generating variables regarding EDGE proxy on Notebook instance
-        instance_hostname = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
+        instance_hostname = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
                                                                 notebook_config['instance_name'])
         edge_instance_name = '{}-{}-{}-edge'.format(notebook_config['service_base_name'],
                                                     notebook_config['project_name'], notebook_config['endpoint_name'])
-        edge_instance_hostname = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'], edge_instance_name)
-        edge_instance_private_ip = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
+        edge_instance_hostname = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'], edge_instance_name)
+        edge_instance_private_ip = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
                                                                          edge_instance_name).get('Private')
-        notebook_config['edge_instance_hostname'] = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
+        notebook_config['edge_instance_hostname'] = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
                                                                                         edge_instance_name)
         keyfile_name = "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
-        edge_ip = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'], edge_instance_name).get('Private')
+        edge_ip = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'], edge_instance_name).get('Private')
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -110,11 +110,11 @@ if __name__ == "__main__":
             notebook_config['initial_user'] = 'ec2-user'
             notebook_config['sudo_group'] = 'wheel'
 
-        logging.info('[CREATING DLAB SSH USER]')
-        print('[CREATING DLAB SSH USER]')
+        logging.info('[CREATING DATA LAB SSH USER]')
+        print('[CREATING DATA LAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
-            notebook_config['initial_user'], notebook_config['dlab_ssh_user'], notebook_config['sudo_group'])
+            notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
             local("~/scripts/{}.py {}".format('create_ssh_user', params))
@@ -122,8 +122,8 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed creating ssh user 'dlab'.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed creating ssh user 'datalab'.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     # configuring proxy on Notebook instance
@@ -133,15 +133,15 @@ if __name__ == "__main__":
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}".format(
             instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
-            notebook_config['dlab_ssh_user'])
+            notebook_config['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('common_configure_proxy', params))
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure proxy.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to configure proxy.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     # updating repositories & installing python packages
@@ -149,7 +149,7 @@ if __name__ == "__main__":
         logging.info('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
         print('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}".format(
-            instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'], os.environ['aws_region'],
+            instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
             edge_instance_private_ip)
         try:
             local("~/scripts/{}.py {}".format('install_prerequisites', params))
@@ -157,8 +157,8 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing apps: apt & pip.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed installing apps: apt & pip.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     # installing and configuring jupyter and all dependencies
@@ -181,7 +181,7 @@ if __name__ == "__main__":
                    os.environ['aws_region'],
                    os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'],
-                   notebook_config['dlab_ssh_user'],
+                   notebook_config['datalab_ssh_user'],
                    os.environ['notebook_scala_version'],
                    os.environ['notebook_r_mirror'],
                    notebook_config['ip_address'],
@@ -193,8 +193,8 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure jupyter.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to configure jupyter.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -203,31 +203,31 @@ if __name__ == "__main__":
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
-            instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['dlab_ssh_user'])
+            instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('install_user_key', params))
         except:
-            dlab.fab.append_result("Failed installing users key")
+            datalab.fab.append_result("Failed installing users key")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing users key.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed installing users key.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
         print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
-            .format(notebook_config['dlab_ssh_user'], instance_hostname, keyfile_name)
+            .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
             local("~/scripts/{}.py {}".format('common_download_git_certfile', params))
             local("~/scripts/{}.py {}".format('manage_git_creds', params))
         except:
-            dlab.fab.append_result("Failed setup git credentials")
+            datalab.fab.append_result("Failed setup git credentials")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to setup git credentials.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to setup git credentials.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -235,7 +235,7 @@ if __name__ == "__main__":
         print('[POST CONFIGURING PROCESS]')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None']:
             params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
-                .format(instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'],
+                .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
                 local("~/scripts/{}.py {}".format('common_remove_remote_kernels', params))
@@ -243,8 +243,8 @@ if __name__ == "__main__":
                 traceback.print_exc()
                 raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to post configuring instance.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to post configuring instance.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -262,24 +262,24 @@ if __name__ == "__main__":
                  "--additional_info '{}'"\
             .format(edge_instance_hostname,
                     keyfile_name,
-                    notebook_config['dlab_ssh_user'],
+                    notebook_config['datalab_ssh_user'],
                     'jupyter',
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
             local("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params))
         except:
-            dlab.fab.append_result("Failed edge reverse proxy template")
+            datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to set edge reverse proxy template.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to set edge reverse proxy template.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     if notebook_config['image_enabled'] == 'true':
         try:
             print('[CREATING AMI]')
-            ami_id = dlab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
+            ami_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
             if ami_id == '' and notebook_config['shared_image_enabled'] == 'false':
                print("Looks like it's the first time this notebook server is being configured. Creating an image.")
                 try:
@@ -288,7 +288,7 @@ if __name__ == "__main__":
                 except KeyError:
                     os.environ['conf_additional_tags'] = 'project_tag:{0};endpoint_tag:{1}'.format(
                         os.environ['project_name'], os.environ['endpoint_name'])
-                image_id = dlab.actions_lib.create_image_from_instance(
+                image_id = datalab.actions_lib.create_image_from_instance(
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
@@ -300,21 +300,21 @@ if __name__ == "__main__":
                         os.environ['conf_additional_tags'], os.environ['endpoint_name'])
                 except KeyError:
                     os.environ['conf_additional_tags'] = 'ami:shared;endpoint_tag:{}'.format(os.environ['endpoint_name'])
-                image_id = dlab.actions_lib.create_image_from_instance(
+                image_id = datalab.actions_lib.create_image_from_instance(
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
                    print("Image was successfully created. Its ID is {}".format(image_id))
         except Exception as err:
-            dlab.fab.append_result("Failed creating image.", str(err))
-            dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+            datalab.fab.append_result("Failed creating image.", str(err))
+            datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
             sys.exit(1)
 
     try:
         # generating output information
-        ip_address = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
+        ip_address = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
                                                            notebook_config['instance_name']).get('Private')
-        dns_name = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
+        dns_name = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
         jupyter_ip_url = "http://" + ip_address + ":8888/{}/".format(notebook_config['exploratory_name'])
         jupyter_dns_url = "http://" + dns_name + ":8888/{}/".format(notebook_config['exploratory_name'])
         jupyter_notebook_access_url = "https://{}/{}/".format(notebook_config['edge_instance_hostname'],
@@ -327,7 +327,7 @@ if __name__ == "__main__":
         print("Instance name: {}".format(notebook_config['instance_name']))
         print("Private DNS: {}".format(dns_name))
         print("Private IP: {}".format(ip_address))
-        print("Instance ID: {}".format(dlab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                           notebook_config['instance_name'])))
         print("Instance type: {}".format(notebook_config['instance_type']))
         print("Key name: {}".format(notebook_config['key_name']))
@@ -341,14 +341,14 @@ if __name__ == "__main__":
         print("ReverseProxyNotebook: {}".format(jupyter_notebook_access_url))
         print("ReverseProxyUngit: {}".format(jupyter_ungit_access_url))
         print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
-              format(notebook_config['key_name'], notebook_config['dlab_ssh_user'], ip_address))
+              format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
         print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
-              format(notebook_config['key_name'], notebook_config['dlab_ssh_user'], dns_name))
+              format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], dns_name))
 
         with open("/root/result.json", 'w') as result:
             res = {"hostname": dns_name,
                    "ip": ip_address,
-                   "instance_id": dlab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+                   "instance_id": datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                      notebook_config['instance_name']),
                    "master_keyname": os.environ['conf_key_name'],
                    "notebook_name": notebook_config['instance_name'],
@@ -366,6 +366,6 @@ if __name__ == "__main__":
                    ]}
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Error with writing results.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
index 1bb09b3..a6f7b14 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
@@ -31,10 +31,10 @@ import time
 from fabric.api import lcd
 from fabric.contrib.files import exists
 from fabvenv import virtualenv
-from dlab.notebook_lib import *
-from dlab.actions_lib import *
-from dlab.fab import *
-from dlab.common_lib import *
+from datalab.notebook_lib import *
+from datalab.actions_lib import *
+from datalab.fab import *
+from datalab.common_lib import *
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--bucket', type=str, default='')
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
index 37d102a..146a1db 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
@@ -24,7 +24,7 @@
 import argparse
 from fabric.api import *
 import boto3
-from dlab.meta_lib import *
+from datalab.meta_lib import *
 import os
 
 parser = argparse.ArgumentParser()
@@ -61,13 +61,13 @@ def configure_notebook(args):
     #put(templates_dir + 'run_template.sh', '/tmp/run_template.sh')
     sudo('\cp /tmp/jupyter_dataengine-service_create_configs.py /usr/local/bin/jupyter_dataengine-service_create_configs.py')
     sudo('chmod 755 /usr/local/bin/jupyter_dataengine-service_create_configs.py')
-    sudo('mkdir -p /usr/lib/python2.7/dlab/')
-    run('mkdir -p /tmp/dlab_libs/')
-    local('scp -i {} /usr/lib/python2.7/dlab/* {}:/tmp/dlab_libs/'.format(args.keyfile, env.host_string))
-    run('chmod a+x /tmp/dlab_libs/*')
-    sudo('mv /tmp/dlab_libs/* /usr/lib/python2.7/dlab/')
+    sudo('mkdir -p /usr/lib/python2.7/datalab/')
+    run('mkdir -p /tmp/datalab_libs/')
+    local('scp -i {} /usr/lib/python2.7/datalab/* {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string))
+    run('chmod a+x /tmp/datalab_libs/*')
+    sudo('mv /tmp/datalab_libs/* /usr/lib/python2.7/datalab/')
     if exists('/usr/lib64'):
-        sudo('ln -fs /usr/lib/python2.7/dlab /usr/lib64/python2.7/dlab')
+        sudo('ln -fs /usr/lib/python2.7/datalab /usr/lib64/python2.7/datalab')
 
 
 if __name__ == "__main__":
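
configure_notebook() above now stages the renamed helper libraries into /usr/lib/python2.7/datalab/ on the cluster side. After a sweep this broad it is worth checking that no stale references to the retired package survive; the snippet below is a hypothetical post-rename check, not part of this commit:

    # check_leftovers.py - hypothetical verification helper, not from this commit.
    # Reports any line that still mentions the old package name once the new
    # name is masked out.
    import os

    for root, _dirs, files in os.walk('infrastructure-provisioning'):
        for fname in files:
            path = os.path.join(root, fname)
            try:
                with open(path) as fh:
                    for lineno, line in enumerate(fh, 1):
                        if 'dlab' in line.replace('datalab', ''):
                            print('{0}:{1}: {2}'.format(path, lineno, line.rstrip()))
            except IOError:
                pass  # unreadable file; skip
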
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
index 950afa1..4099cfe 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
@@ -25,9 +25,9 @@ import logging
 import json
 import sys
 import traceback
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import os
 import argparse
 from fabric.api import *
@@ -81,25 +81,25 @@ if __name__ == "__main__":
                                                                          notebook_config['project_name'],
                                                                          notebook_config['endpoint_name'])
         notebook_config['tag_name'] = '{}-tag'.format(notebook_config['service_base_name'])
-        notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
-        notebook_config['ip_address'] = dlab.meta_lib.get_instance_ip_address(
+        notebook_config['datalab_ssh_user'] = os.environ['conf_os_user']
+        notebook_config['ip_address'] = datalab.meta_lib.get_instance_ip_address(
             notebook_config['tag_name'], notebook_config['instance_name']).get('Private')
 
         # generating variables regarding EDGE proxy on Notebook instance
-        instance_hostname = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
+        instance_hostname = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
                                                                 notebook_config['instance_name'])
         edge_instance_name = '{}-{}-{}-edge'.format(notebook_config['service_base_name'],
                                                     notebook_config['project_name'], notebook_config['endpoint_name'])
-        edge_instance_hostname = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'], edge_instance_name)
-        edge_instance_private_ip = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
+        edge_instance_hostname = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'], edge_instance_name)
+        edge_instance_private_ip = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
                                                                          edge_instance_name).get('Private')
-        notebook_config['edge_instance_hostname'] = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
+        notebook_config['edge_instance_hostname'] = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
                                                                                         edge_instance_name)
         keyfile_name = "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
-        edge_ip = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'], edge_instance_name).get('Private')
+        edge_ip = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'], edge_instance_name).get('Private')
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -110,11 +110,11 @@ if __name__ == "__main__":
             notebook_config['initial_user'] = 'ec2-user'
             notebook_config['sudo_group'] = 'wheel'
 
-        logging.info('[CREATING DLAB SSH USER]')
-        print('[CREATING DLAB SSH USER]')
+        logging.info('[CREATING DATA LAB SSH USER]')
+        print('[CREATING DATA LAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format\
             (instance_hostname,  "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
-             notebook_config['initial_user'], notebook_config['dlab_ssh_user'], notebook_config['sudo_group'])
+             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
             local("~/scripts/{}.py {}".format('create_ssh_user', params))
@@ -122,8 +122,8 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed creating ssh user 'dlab'.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed creating ssh user 'datalab'.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     # configuring proxy on Notebook instance
@@ -133,15 +133,15 @@ if __name__ == "__main__":
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
-                    notebook_config['dlab_ssh_user'])
+                    notebook_config['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('common_configure_proxy', params))
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure proxy.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to configure proxy.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     # updating repositories & installing python packages
@@ -149,15 +149,15 @@ if __name__ == "__main__":
         logging.info('[INSTALLING PREREQUISITES TO JUPYTERLAB NOTEBOOK INSTANCE]')
         print('[INSTALLING PREREQUISITES TO JUPYTERLAB NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {}".\
-            format(instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'], os.environ['aws_region'])
+            format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'])
         try:
             local("~/scripts/{}.py {}".format('install_prerequisites', params))
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing apps: apt & pip.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed installing apps: apt & pip.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     # installing and configuring jupiter and all dependencies
@@ -181,7 +181,7 @@ if __name__ == "__main__":
                    os.environ['aws_region'],
                    os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'],
-                   notebook_config['dlab_ssh_user'],
+                   notebook_config['datalab_ssh_user'],
                    os.environ['notebook_scala_version'],
                    os.environ['notebook_r_mirror'],
                    notebook_config['ip_address'],
@@ -192,8 +192,8 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure jupyterlab.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to configure jupyterlab.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -202,31 +202,31 @@ if __name__ == "__main__":
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
-            instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['dlab_ssh_user'])
+            instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('install_user_key', params))
         except:
-            dlab.fab.append_result("Failed installing users key")
+            datalab.fab.append_result("Failed installing users key")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing users key.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed installing users key.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
         print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
-            .format(notebook_config['dlab_ssh_user'], instance_hostname, keyfile_name)
+            .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
             local("~/scripts/{}.py {}".format('common_download_git_certfile', params))
             local("~/scripts/{}.py {}".format('manage_git_creds', params))
         except:
-            dlab.fab.append_result("Failed setup git credentials")
+            datalab.fab.append_result("Failed setup git credentials")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to setup git credentials.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to setup git credentials.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -234,7 +234,7 @@ if __name__ == "__main__":
         print('[POST CONFIGURING PROCESS]')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None']:
             params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
-                .format(instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'],
+                .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
                 local("~/scripts/{}.py {}".format('common_remove_remote_kernels', params))
@@ -242,8 +242,8 @@ if __name__ == "__main__":
                 traceback.print_exc()
                 raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to post configuring instance.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to post configuring instance.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -261,34 +261,34 @@ if __name__ == "__main__":
                  "--additional_info '{}'"\
             .format(edge_instance_hostname,
                     keyfile_name,
-                    notebook_config['dlab_ssh_user'],
+                    notebook_config['datalab_ssh_user'],
                     'jupyter',
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
             local("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params))
         except:
-            dlab.fab.append_result("Failed edge reverse proxy template")
+            datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to set edge reverse proxy template.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to set edge reverse proxy template.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     if notebook_config['image_enabled'] == 'true':
         try:
             print('[CREATING AMI]')
-            ami_id = dlab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
+            ami_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
             if ami_id == '':
                 print("Looks like it's first time we configure notebook server. Creating image.")
-                image_id = dlab.actions_lib.create_image_from_instance(
+                image_id = datalab.actions_lib.create_image_from_instance(
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
                     print("Image was successfully created. It's ID is {}".format(image_id))
         except Exception as err:
-            dlab.fab.append_result("Failed creating image.", str(err))
-            dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+            datalab.fab.append_result("Failed creating image.", str(err))
+            datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
             sys.exit(1)
 
     try:
@@ -299,15 +299,15 @@ if __name__ == "__main__":
                  "--os_user {} ". \
             format(instance_hostname,
                    keyfile_name,
-                   notebook_config['dlab_ssh_user'])
+                   notebook_config['datalab_ssh_user'])
         try:
             local("~/scripts/configure_proxy_for_docker.py {}".format(params))
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure proxy for docker.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to configure proxy for docker.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -318,22 +318,22 @@ if __name__ == "__main__":
                  "--os_user {} ". \
             format(instance_hostname,
                    keyfile_name,
-                   notebook_config['dlab_ssh_user'])
+                   notebook_config['datalab_ssh_user'])
         try:
            local("~/scripts/jupyterlab_container_start.py {}".format(params))
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to start Jupyter container.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to start Jupyter container.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
         # generating output information
-        ip_address = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
+        ip_address = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
                                                            notebook_config['instance_name']).get('Private')
-        dns_name = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
+        dns_name = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
         jupyter_ip_url = "http://" + ip_address + ":8888/{}/".format(notebook_config['exploratory_name'])
         jupyter_dns_url = "http://" + dns_name + ":8888/{}/".format(notebook_config['exploratory_name'])
         jupyter_notebook_acces_url = "http://{}/{}/".format(notebook_config['edge_instance_hostname'],
@@ -346,7 +346,7 @@ if __name__ == "__main__":
         print("Instance name: {}".format(notebook_config['instance_name']))
         print("Private DNS: {}".format(dns_name))
         print("Private IP: {}".format(ip_address))
-        print("Instance ID: {}".format(dlab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                           notebook_config['instance_name'])))
         print("Instance type: {}".format(notebook_config['instance_type']))
         print("Key name: {}".format(notebook_config['key_name']))
@@ -360,14 +360,14 @@ if __name__ == "__main__":
         print("ReverseProxyNotebook".format(jupyter_notebook_acces_url))
         print("ReverseProxyUngit".format(jupyter_ungit_acces_url))
         print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
-              format(notebook_config['key_name'], notebook_config['dlab_ssh_user'], ip_address))
+              format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
         print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
-              format(notebook_config['key_name'], notebook_config['dlab_ssh_user'], dns_name))
+              format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], dns_name))
 
         with open("/root/result.json", 'w') as result:
             res = {"hostname": dns_name,
                    "ip": ip_address,
-                   "instance_id": dlab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+                   "instance_id": datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                      notebook_config['instance_name']),
                    "master_keyname": os.environ['conf_key_name'],
                    "notebook_name": notebook_config['instance_name'],
@@ -385,6 +385,6 @@ if __name__ == "__main__":
                    ]}
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Error with writing results.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
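
The change above is mechanical, so it can also be checked mechanically. Below is a minimal
sketch (not part of this commit) that scans a working tree for leftover occurrences of the
old name; the skip list and entry point are assumptions made for illustration. Since neither
'datalab' nor 'DATA_LAB' contains the substring 'dlab', a plain case-insensitive substring
search reports only genuine leftovers.

    #!/usr/bin/env python3
    # Hypothetical helper, not part of this commit: report leftover 'dlab'
    # occurrences (in file contents and file names) after the rename.
    import os
    import re
    import sys

    OLD_NAME = re.compile(r'dlab', re.IGNORECASE)   # never matches 'datalab'/'DATA_LAB'
    SKIP_DIRS = {'.git', 'node_modules', 'target'}  # assumed skip list

    def find_stale_references(root):
        hits = []
        for dirpath, dirnames, filenames in os.walk(root):
            # prune directories that should never be scanned
            dirnames[:] = [d for d in dirnames if d not in SKIP_DIRS]
            for name in filenames:
                path = os.path.join(dirpath, name)
                if OLD_NAME.search(name):
                    hits.append((path, 0, '(file name)'))
                try:
                    with open(path, encoding='utf-8', errors='ignore') as handle:
                        for lineno, line in enumerate(handle, 1):
                            if OLD_NAME.search(line):
                                hits.append((path, lineno, line.strip()))
                except OSError:
                    continue  # unreadable file; skip it
        return hits

    if __name__ == '__main__':
        root = sys.argv[1] if len(sys.argv) > 1 else '.'
        for path, lineno, line in find_stale_references(root):
            print('{}:{}: {}'.format(path, lineno, line))
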
diff --git a/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
index 9986ab2..4f435dd 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
@@ -22,9 +22,9 @@
 # ******************************************************************************
 
 import json
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import sys
 import time
 import os
@@ -43,10 +43,10 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
 
     try:
-        dlab.actions_lib.create_aws_config_files()
+        datalab.actions_lib.create_aws_config_files()
         print('Generating infrastructure names and tags')
         project_conf = dict()
-        project_conf['service_base_name'] = os.environ['conf_service_base_name'] = dlab.fab.replace_multi_symbols(
+        project_conf['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
             os.environ['conf_service_base_name'][:20], '-', True)
         project_conf['endpoint_name'] = os.environ['endpoint_name']
         project_conf['endpoint_tag'] = project_conf['endpoint_name']
@@ -56,7 +56,7 @@ if __name__ == "__main__":
         project_conf['public_subnet_id'] = os.environ['aws_subnet_id']
         project_conf['vpc_id'] = os.environ['aws_vpc_id']
         project_conf['region'] = os.environ['aws_region']
-        project_conf['ami_id'] = dlab.meta_lib.get_ami_id(os.environ['aws_{}_image_name'.format(
+        project_conf['ami_id'] = datalab.meta_lib.get_ami_id(os.environ['aws_{}_image_name'.format(
             os.environ['conf_os_family'])])
         project_conf['instance_size'] = os.environ['aws_edge_instance_size']
         project_conf['sg_ids'] = os.environ['aws_security_groups_ids']
@@ -118,11 +118,11 @@ if __name__ == "__main__":
         project_conf['provision_instance_ip'] = None
         project_conf['local_endpoint'] = False
         try:
-            project_conf['provision_instance_ip'] = '{}/32'.format(dlab.meta_lib.get_instance_ip_address(
+            project_conf['provision_instance_ip'] = '{}/32'.format(datalab.meta_lib.get_instance_ip_address(
                 project_conf['tag_name'], '{0}-{1}-endpoint'.format(project_conf['service_base_name'],
                                                                     project_conf['endpoint_name'])).get('Private'))
         except:
-            project_conf['provision_instance_ip'] = '{}/32'.format(dlab.meta_lib.get_instance_ip_address(
+            project_conf['provision_instance_ip'] = '{}/32'.format(datalab.meta_lib.get_instance_ip_address(
                 project_conf['tag_name'], '{0}-ssn'.format(project_conf['service_base_name'])).get('Private'))
             project_conf['local_endpoint'] = True
         if 'aws_user_predefined_s3_policies' not in os.environ:
@@ -161,13 +161,13 @@ if __name__ == "__main__":
                                                                                            project_conf['endpoint_tag'])
         print('Additional tags will be added: {}'.format(os.environ['conf_additional_tags']))
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
 
     if not project_conf['local_endpoint']:
         # attach project_tag and endpoint_tag to endpoint
         try:
-            endpoint_id = dlab.meta_lib.get_instance_by_name(project_conf['tag_name'], '{0}-{1}-endpoint'.format(
+            endpoint_id = datalab.meta_lib.get_instance_by_name(project_conf['tag_name'], '{0}-{1}-endpoint'.format(
                 project_conf['service_base_name'], project_conf['endpoint_name']))
             print("Endpoint id: " + endpoint_id)
             ec2 = boto3.client('ec2')
@@ -202,14 +202,14 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to create subnet.", str(err))
+        datalab.fab.append_result("Failed to create subnet.", str(err))
         sys.exit(1)
 
     tag = {"Key": project_conf['tag_name'],
            "Value": "{0}-{1}-{2}-subnet".format(project_conf['service_base_name'], project_conf['project_name'],
                                                 project_conf['endpoint_name'])}
-    project_conf['private_subnet_cidr'] = dlab.meta_lib.get_subnet_by_tag(tag)
-    subnet_id = dlab.meta_lib.get_subnet_by_cidr(project_conf['private_subnet_cidr'], project_conf['vpc2_id'])
+    project_conf['private_subnet_cidr'] = datalab.meta_lib.get_subnet_by_tag(tag)
+    subnet_id = datalab.meta_lib.get_subnet_by_cidr(project_conf['private_subnet_cidr'], project_conf['vpc2_id'])
     print('Subnet id: {}'.format(subnet_id))
     print('NEW SUBNET CIDR CREATED: {}'.format(project_conf['private_subnet_cidr']))
 
@@ -229,7 +229,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to creating roles.", str(err))
+        datalab.fab.append_result("Failed to creating roles.", str(err))
         sys.exit(1)
 
     try:
@@ -249,14 +249,14 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to creating roles.", str(err))
-        dlab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
+        datalab.fab.append_result("Failed to creating roles.", str(err))
+        datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
         sys.exit(1)
 
     try:
         logging.info('[CREATE SECURITY GROUP FOR EDGE NODE]')
         print('[CREATE SECURITY GROUPS FOR EDGE]')
-        edge_sg_ingress = dlab.meta_lib.format_sg([
+        edge_sg_ingress = datalab.meta_lib.format_sg([
             {
                 "IpProtocol": "-1",
                 "IpRanges": [{"CidrIp": project_conf['private_subnet_cidr']}],
@@ -293,7 +293,7 @@ if __name__ == "__main__":
                 "PrefixListIds": []
             }
         ])
-        edge_sg_egress = dlab.meta_lib.format_sg([
+        edge_sg_egress = datalab.meta_lib.format_sg([
             {
                 "PrefixListIds": [],
                 "FromPort": 22,
@@ -418,26 +418,26 @@ if __name__ == "__main__":
             local("~/scripts/{}.py {}".format('common_create_security_group', params))
         except Exception as err:
             traceback.print_exc()
-            dlab.fab.append_result("Failed creating security group for edge node.", str(err))
+            datalab.fab.append_result("Failed creating security group for edge node.", str(err))
             raise Exception
 
         with hide('stderr', 'running', 'warnings'):
             print('Waiting for changes to propagate')
             time.sleep(10)
     except:
-        dlab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
-        dlab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
+        datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
+        datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
         sys.exit(1)
 
     try:
         logging.info('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
         print('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
-        project_group_id = dlab.meta_lib.check_security_group(project_conf['edge_security_group_name'])
+        project_group_id = datalab.meta_lib.check_security_group(project_conf['edge_security_group_name'])
         sg_list = project_conf['sg_ids'].replace(" ", "").split(',')
         rules_list = []
         for i in sg_list:
             rules_list.append({"GroupId": i})
-        private_sg_ingress = dlab.meta_lib.format_sg([
+        private_sg_ingress = datalab.meta_lib.format_sg([
             {
                 "IpProtocol": "-1",
                 "IpRanges": [],
@@ -458,7 +458,7 @@ if __name__ == "__main__":
             }
         ])
 
-        private_sg_egress = dlab.meta_lib.format_sg([
+        private_sg_egress = datalab.meta_lib.format_sg([
             {
                 "IpProtocol": "-1",
                 "IpRanges": [{"CidrIp": project_conf['private_subnet_cidr']}],
@@ -503,11 +503,11 @@ if __name__ == "__main__":
             print('Waiting for changes to propagate')
             time.sleep(10)
     except Exception as err:
-        dlab.fab.append_result("Failed creating security group for private subnet.", str(err))
-        dlab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
-        dlab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
+        datalab.fab.append_result("Failed creating security group for private subnet.", str(err))
+        datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
+        datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR MASTER NODE]')
@@ -525,11 +525,11 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to create sg.", str(err))
-        dlab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
-        dlab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
+        datalab.fab.append_result("Failed to create sg.", str(err))
+        datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
+        datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR SLAVE NODES]')
@@ -547,12 +547,12 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to create security group.", str(err))
-        dlab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
-        dlab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
+        datalab.fab.append_result("Failed to create security group.", str(err))
+        datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
+        datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
         sys.exit(1)
 
     try:
@@ -583,12 +583,12 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to create buckets.", str(err))
-        dlab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
-        dlab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
+        datalab.fab.append_result("Failed to create buckets.", str(err))
+        datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
+        datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
         sys.exit(1)
 
     try:
@@ -607,13 +607,13 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to create bucket policy.", str(err))
-        dlab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
-        dlab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
-        dlab.actions_lib.remove_s3('edge', project_conf['project_name'])
+        datalab.fab.append_result("Failed to create bucket policy.", str(err))
+        datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
+        datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
+        datalab.actions_lib.remove_s3('edge', project_conf['project_name'])
         sys.exit(1)
 
     try:
@@ -627,11 +627,11 @@ if __name__ == "__main__":
                     project_conf['edge_instance_name'])
         try:
             local("~/scripts/{}.py {}".format('common_create_instance', params))
-            edge_instance = dlab.meta_lib.get_instance_by_name(project_conf['tag_name'],
+            edge_instance = datalab.meta_lib.get_instance_by_name(project_conf['tag_name'],
                                                                project_conf['edge_instance_name'])
             if os.environ['edge_is_nat']:
                 try:
-                    dlab.actions_lib.modify_instance_sourcedescheck(edge_instance)
+                    datalab.actions_lib.modify_instance_sourcedescheck(edge_instance)
                 except:
                     traceback.print_exc()
                     raise Exception
@@ -639,20 +639,20 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to create instance.", str(err))
-        dlab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
-        dlab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
-        dlab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
-        dlab.actions_lib.remove_s3('edge', project_conf['project_name'])
+        datalab.fab.append_result("Failed to create instance.", str(err))
+        datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
+        datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
+        datalab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
+        datalab.actions_lib.remove_s3('edge', project_conf['project_name'])
         sys.exit(1)
 
     if project_conf['network_type'] == 'public':
         try:
             logging.info('[ASSOCIATING ELASTIC IP]')
             print('[ASSOCIATING ELASTIC IP]')
-            project_conf['edge_id'] = dlab.meta_lib.get_instance_by_name(project_conf['tag_name'],
+            project_conf['edge_id'] = datalab.meta_lib.get_instance_by_name(project_conf['tag_name'],
                                                                          project_conf['edge_instance_name'])
             try:
                 project_conf['elastic_ip'] = os.environ['edge_elastic_ip']
@@ -667,21 +667,21 @@ if __name__ == "__main__":
                 traceback.print_exc()
                 raise Exception
         except Exception as err:
-            dlab.fab.append_result("Failed to associate elastic ip.", str(err))
+            datalab.fab.append_result("Failed to associate elastic ip.", str(err))
             try:
-                project_conf['edge_public_ip'] = dlab.meta_lib.get_instance_ip_address(
+                project_conf['edge_public_ip'] = datalab.meta_lib.get_instance_ip_address(
                     project_conf['tag_name'], project_conf['edge_instance_name']).get('Public')
-                project_conf['allocation_id'] = dlab.meta_lib.get_allocation_id_by_elastic_ip(
+                project_conf['allocation_id'] = datalab.meta_lib.get_allocation_id_by_elastic_ip(
                     project_conf['edge_public_ip'])
             except:
                 print("No Elastic IPs to release!")
-            dlab.actions_lib.remove_ec2(project_conf['tag_name'], project_conf['edge_instance_name'])
-            dlab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
-            dlab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
-            dlab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
-            dlab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
-            dlab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
-            dlab.actions_lib.remove_s3('edge', project_conf['project_name'])
+            datalab.actions_lib.remove_ec2(project_conf['tag_name'], project_conf['edge_instance_name'])
+            datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
+            datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
+            datalab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
+            datalab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
+            datalab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
+            datalab.actions_lib.remove_s3('edge', project_conf['project_name'])
             sys.exit(1)
 
     if os.environ['edge_is_nat'] == 'true':
@@ -698,19 +698,19 @@ if __name__ == "__main__":
                 traceback.print_exc()
                 raise Exception
         except Exception as err:
-            dlab.fab.append_result("Failed to configure route table.", str(err))
+            datalab.fab.append_result("Failed to configure route table.", str(err))
             try:
-                project_conf['edge_public_ip'] = dlab.meta_lib.get_instance_ip_address(
+                project_conf['edge_public_ip'] = datalab.meta_lib.get_instance_ip_address(
                     project_conf['tag_name'], project_conf['edge_instance_name']).get('Public')
-                project_conf['allocation_id'] = dlab.meta_lib.get_allocation_id_by_elastic_ip(
+                project_conf['allocation_id'] = datalab.meta_lib.get_allocation_id_by_elastic_ip(
                     project_conf['edge_public_ip'])
             except:
                 print("No Elastic IPs to release!")
-            dlab.actions_lib.remove_ec2(project_conf['tag_name'], project_conf['edge_instance_name'])
-            dlab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
-            dlab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
-            dlab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
-            dlab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
-            dlab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
-            dlab.actions_lib.remove_s3('edge', project_conf['project_name'])
+            datalab.actions_lib.remove_ec2(project_conf['tag_name'], project_conf['edge_instance_name'])
+            datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
+            datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
+            datalab.actions_lib.remove_sgroups(project_conf['dataengine_instances_name'])
+            datalab.actions_lib.remove_sgroups(project_conf['notebook_instance_name'])
+            datalab.actions_lib.remove_sgroups(project_conf['edge_instance_name'])
+            datalab.actions_lib.remove_s3('edge', project_conf['project_name'])
             sys.exit(1)
\ No newline at end of file
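
A side note on project_prepare.py above (the pattern is untouched by this commit apart from
the rename): every provisioning step repeats the same failure handling -- append_result(),
a steadily growing list of datalab.actions_lib.remove_*() calls, then sys.exit(1). The
following sketch shows one way such rollback bookkeeping could be centralized; the Rollback
helper is hypothetical, and only the remove_*() names in the usage comment come from the
script itself.

    import sys
    import traceback

    class Rollback:
        """Collect cleanup callbacks; run them in reverse order if the block fails."""

        def __init__(self):
            self._cleanups = []

        def defer(self, func, *args, **kwargs):
            # register a teardown action for a resource that now exists
            self._cleanups.append((func, args, kwargs))

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc, tb):
            if exc_type is None:
                return False  # success: keep every resource
            traceback.print_exc()
            for func, args, kwargs in reversed(self._cleanups):
                try:
                    func(*args, **kwargs)
                except Exception:
                    traceback.print_exc()  # keep removing the remaining resources
            sys.exit(1)

    # Hypothetical usage, with action names taken from the script above:
    # with Rollback() as rollback:
    #     create_security_groups(...)
    #     rollback.defer(datalab.actions_lib.remove_sgroups, edge_instance_name)
    #     create_buckets(...)
    #     rollback.defer(datalab.actions_lib.remove_s3, 'edge', project_name)
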
diff --git a/infrastructure-provisioning/src/general/scripts/aws/project_terminate.py b/infrastructure-provisioning/src/general/scripts/aws/project_terminate.py
index 9243ff5..6d0db7f 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/project_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/project_terminate.py
@@ -22,9 +22,9 @@
 # ******************************************************************************
 
 import json
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import sys
 import time
 import os
@@ -37,7 +37,7 @@ import requests
 def terminate_edge_node(tag_name, project_name, tag_value, nb_sg, edge_sg, de_sg, emr_sg, endpoint_name, service_base_name):
     print('Terminating EMR cluster')
     try:
-        clusters_list = dlab.meta_lib.get_emr_list(tag_name)
+        clusters_list = datalab.meta_lib.get_emr_list(tag_name)
         if clusters_list:
             for cluster_id in clusters_list:
                 client = boto3.client('emr')
@@ -45,58 +45,58 @@ def terminate_edge_node(tag_name, project_name, tag_value, nb_sg, edge_sg, de_sg
                 cluster = cluster.get("Cluster")
                 emr_name = cluster.get('Name')
                 if '{}'.format(tag_value[:-1]) in emr_name:
-                    dlab.actions_lib.terminate_emr(cluster_id)
+                    datalab.actions_lib.terminate_emr(cluster_id)
                     print("The EMR cluster {} has been terminated successfully".format(emr_name))
         else:
             print("There are no EMR clusters to terminate.")
     except Exception as err:
-        dlab.fab.append_result("Failed to terminate EMR cluster.", str(err))
+        datalab.fab.append_result("Failed to terminate EMR cluster.", str(err))
         sys.exit(1)
 
     print("Terminating EDGE and notebook instances")
     try:
-        dlab.actions_lib.remove_ec2(tag_name, tag_value)
+        datalab.actions_lib.remove_ec2(tag_name, tag_value)
     except Exception as err:
-        dlab.fab.append_result("Failed to terminate instances.", str(err))
+        datalab.fab.append_result("Failed to terminate instances.", str(err))
         sys.exit(1)
 
     print("Removing s3 bucket")
     try:
-        dlab.actions_lib.remove_s3('edge', project_name)
+        datalab.actions_lib.remove_s3('edge', project_name)
     except Exception as err:
-        dlab.fab.append_result("Failed to remove buckets.", str(err))
+        datalab.fab.append_result("Failed to remove buckets.", str(err))
         sys.exit(1)
 
     print("Removing IAM roles and profiles")
     try:
-        dlab.actions_lib.remove_all_iam_resources('notebook', project_name, endpoint_name)
-        dlab.actions_lib.remove_all_iam_resources('edge', project_name, endpoint_name)
+        datalab.actions_lib.remove_all_iam_resources('notebook', project_name, endpoint_name)
+        datalab.actions_lib.remove_all_iam_resources('edge', project_name, endpoint_name)
     except Exception as err:
-        dlab.fab.append_result("Failed to remove IAM roles and profiles.", str(err))
+        datalab.fab.append_result("Failed to remove IAM roles and profiles.", str(err))
         sys.exit(1)
 
     print("Deregistering project specific notebook's AMI")
     try:
-        dlab.actions_lib.deregister_image('{}-{}-{}-*'.format(service_base_name, project_name, endpoint_name))
+        datalab.actions_lib.deregister_image('{}-{}-{}-*'.format(service_base_name, project_name, endpoint_name))
     except Exception as err:
-        dlab.fab.append_result("Failed to deregister images.", str(err))
+        datalab.fab.append_result("Failed to deregister images.", str(err))
         sys.exit(1)
 
     print("Removing security groups")
     try:
-        dlab.actions_lib.remove_sgroups(emr_sg)
-        dlab.actions_lib.remove_sgroups(de_sg)
-        dlab.actions_lib.remove_sgroups(nb_sg)
-        dlab.actions_lib.remove_sgroups(edge_sg)
+        datalab.actions_lib.remove_sgroups(emr_sg)
+        datalab.actions_lib.remove_sgroups(de_sg)
+        datalab.actions_lib.remove_sgroups(nb_sg)
+        datalab.actions_lib.remove_sgroups(edge_sg)
     except Exception as err:
-        dlab.fab.append_result("Failed to remove Security Groups.", str(err))
+        datalab.fab.append_result("Failed to remove Security Groups.", str(err))
         sys.exit(1)
 
     print("Removing private subnet")
     try:
-        dlab.actions_lib.remove_subnets(tag_value)
+        datalab.actions_lib.remove_subnets(tag_value)
     except Exception as err:
-        dlab.fab.append_result("Failed to remove subnets.", str(err))
+        datalab.fab.append_result("Failed to remove subnets.", str(err))
         sys.exit(1)
 
 
@@ -109,7 +109,7 @@ if __name__ == "__main__":
                         filename=local_log_filepath)
 
     # generating variables dictionary
-    dlab.actions_lib.create_aws_config_files()
+    datalab.actions_lib.create_aws_config_files()
     print('Generating infrastructure names and tags')
     project_conf = dict()
     project_conf['service_base_name'] = (os.environ['conf_service_base_name'])
@@ -143,13 +143,13 @@ if __name__ == "__main__":
                                 project_conf['emr_sg'], project_conf['endpoint_name'], project_conf['service_base_name'])
         except Exception as err:
             traceback.print_exc()
-            dlab.fab.append_result("Failed to terminate project.", str(err))
+            datalab.fab.append_result("Failed to terminate project.", str(err))
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
 
     try:
-        endpoint_id = dlab.meta_lib.get_instance_by_name(project_conf['tag_name'],
+        endpoint_id = datalab.meta_lib.get_instance_by_name(project_conf['tag_name'],
                                                          project_conf['endpoint_instance_name'])
         print("Endpoint id: " + endpoint_id)
         ec2 = boto3.client('ec2')
@@ -206,5 +206,5 @@ if __name__ == "__main__":
             print(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results", str(err))
+        datalab.fab.append_result("Error with writing results", str(err))
         sys.exit(1)
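
terminate_edge_node() above removes resources in strict dependency order -- EMR clusters,
then instances, the S3 bucket, IAM roles and profiles, AMIs, security groups, and finally
the private subnet -- exiting non-zero at the first failure. A table-driven sketch of that
control flow is shown below; the driver itself is hypothetical, while the step labels and
their ordering come from the script.

    import sys
    import traceback

    def run_teardown(steps):
        """Run (label, thunk) pairs in order; exit with status 1 on the first failure."""
        for label, thunk in steps:
            print(label)
            try:
                thunk()
            except Exception as err:
                traceback.print_exc()
                print('Teardown step failed: {} ({})'.format(label, err))
                sys.exit(1)

    # Hypothetical wiring, using action names from project_terminate.py:
    # run_teardown([
    #     ('Terminating EMR cluster', lambda: datalab.actions_lib.terminate_emr(cluster_id)),
    #     ('Terminating EDGE and notebook instances',
    #      lambda: datalab.actions_lib.remove_ec2(tag_name, tag_value)),
    #     ('Removing s3 bucket', lambda: datalab.actions_lib.remove_s3('edge', project_name)),
    #     ('Removing private subnet', lambda: datalab.actions_lib.remove_subnets(tag_value)),
    # ])
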
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
index dd2a93c..7d8268d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
@@ -25,9 +25,9 @@ import logging
 import json
 import sys
 import traceback
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import os
 import argparse
 from fabric.api import *
@@ -82,26 +82,26 @@ if __name__ == "__main__":
                                                                          notebook_config['project_name'],
                                                                          notebook_config['endpoint_name'])
         notebook_config['tag_name'] = '{}-tag'.format(notebook_config['service_base_name'])
-        notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
-        notebook_config['ip_address'] = dlab.meta_lib.get_instance_ip_address(
+        notebook_config['datalab_ssh_user'] = os.environ['conf_os_user']
+        notebook_config['ip_address'] = datalab.meta_lib.get_instance_ip_address(
             notebook_config['tag_name'], notebook_config['instance_name']).get('Private')
 
         # generating variables regarding EDGE proxy on Notebook instance
-        instance_hostname = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
+        instance_hostname = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
                                                                 notebook_config['instance_name'])
         edge_instance_name = '{}-{}-{}-edge'.format(notebook_config['service_base_name'],
                                                     notebook_config['project_name'], notebook_config['endpoint_name'])
-        edge_instance_hostname = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'], edge_instance_name)
-        edge_instance_private_ip = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
+        edge_instance_hostname = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'], edge_instance_name)
+        edge_instance_private_ip = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
                                                                          edge_instance_name).get('Private')
-        notebook_config['edge_instance_hostname'] = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
+        notebook_config['edge_instance_hostname'] = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
                                                                                         edge_instance_name)
         keyfile_name = "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
-        edge_ip = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'], edge_instance_name).get('Private')
-        notebook_config['rstudio_pass'] = dlab.fab.id_generator()
+        edge_ip = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'], edge_instance_name).get('Private')
+        notebook_config['rstudio_pass'] = datalab.fab.id_generator()
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -112,11 +112,11 @@ if __name__ == "__main__":
             notebook_config['initial_user'] = 'ec2-user'
             notebook_config['sudo_group'] = 'wheel'
 
-        logging.info('[CREATING DLAB SSH USER]')
-        print('[CREATING DLAB SSH USER]')
+        logging.info('[CREATING DATA LAB SSH USER]')
+        print('[CREATING DATA LAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
-            notebook_config['initial_user'], notebook_config['dlab_ssh_user'], notebook_config['sudo_group'])
+            notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
             local("~/scripts/{}.py {}".format('create_ssh_user', params))
@@ -124,8 +124,8 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed creating ssh user 'dlab'.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed creating ssh user 'datalab'.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     # configuring proxy on Notebook instance
@@ -135,15 +135,15 @@ if __name__ == "__main__":
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
-                    notebook_config['dlab_ssh_user'])
+                    notebook_config['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('common_configure_proxy', params))
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure proxy.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to configure proxy.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     # updating repositories & installing python packages
@@ -151,7 +151,7 @@ if __name__ == "__main__":
         logging.info('[INSTALLING PREREQUISITES TO R_STUDIO NOTEBOOK INSTANCE]')
         print('[INSTALLING PREREQUISITES TO R_STUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}".\
-            format(instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'], os.environ['aws_region'],
+            format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
                    edge_instance_private_ip)
         try:
             local("~/scripts/{}.py {}".format('install_prerequisites', params))
@@ -159,8 +159,8 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing apps: apt & pip.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed installing apps: apt & pip.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     # installing and configuring R_STUDIO and all dependencies
@@ -173,7 +173,7 @@ if __name__ == "__main__":
                  "--r_mirror {6} --ip_address {7} --exploratory_name {8} --edge_ip {9}" \
             .format(instance_hostname, keyfile_name,
                     os.environ['aws_region'], notebook_config['rstudio_pass'],
-                    os.environ['notebook_rstudio_version'], notebook_config['dlab_ssh_user'],
+                    os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
                     os.environ['notebook_r_mirror'], notebook_config['ip_address'],
                     notebook_config['exploratory_name'], edge_ip)
         try:
@@ -182,8 +182,8 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to configure rstudio.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to configure rstudio.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -192,30 +192,30 @@ if __name__ == "__main__":
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
-            instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['dlab_ssh_user'])
+            instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
             local("~/scripts/{}.py {}".format('install_user_key', params))
         except:
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing users key.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed installing users key.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
         print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
-            .format(notebook_config['dlab_ssh_user'], instance_hostname, keyfile_name)
+            .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
             local("~/scripts/{}.py {}".format('manage_git_creds', params))
         except:
-            dlab.fab.append_result("Failed setup git credentials")
+            datalab.fab.append_result("Failed setup git credentials")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to setup git credentials.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to setup git credentials.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -223,7 +223,7 @@ if __name__ == "__main__":
         print('[POST CONFIGURING PROCESS]')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None']:
             params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
-                .format(instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'],
+                .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
                 local("~/scripts/{}.py {}".format('common_remove_remote_kernels', params))
@@ -231,8 +231,8 @@ if __name__ == "__main__":
                 traceback.print_exc()
                 raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to post configuring instance.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to post configuring instance.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     try:
@@ -243,22 +243,22 @@ if __name__ == "__main__":
             'tensor': False
         }
         params = "--edge_hostname {} --keyfile {} --os_user {} --type {} --exploratory_name {} --additional_info '{}'"\
-            .format(edge_instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'], 'rstudio',
+            .format(edge_instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], 'rstudio',
                     notebook_config['exploratory_name'], json.dumps(additional_info))
         try:
             local("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params))
         except:
-            dlab.fab.append_result("Failed edge reverse proxy template")
+            datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed to set edge reverse proxy template.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Failed to set edge reverse proxy template.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     if notebook_config['image_enabled'] == 'true':
         try:
             print('[CREATING AMI]')
-            ami_id = dlab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
+            ami_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
             if ami_id == '' and notebook_config['shared_image_enabled'] == 'false':
                 print("Looks like it's first time we configure notebook server. Creating image.")
                 try:
@@ -267,7 +267,7 @@ if __name__ == "__main__":
                 except KeyError:
                     os.environ['conf_additional_tags'] = 'project_tag:{0};endpoint_tag:{1}'.format(
                         os.environ['project_name'], os.environ['endpoint_name'])
-                image_id = dlab.actions_lib.create_image_from_instance(
+                image_id = datalab.actions_lib.create_image_from_instance(
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
@@ -279,21 +279,21 @@ if __name__ == "__main__":
                 except KeyError:
                     os.environ['conf_additional_tags'] = 'ami:shared;endpoint_tag:{}'.format(
                         os.environ['endpoint_name'])
-                image_id = dlab.actions_lib.create_image_from_instance(
+                image_id = datalab.actions_lib.create_image_from_instance(
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
                     print("Image was successfully created. It's ID is {}".format(image_id))
         except Exception as err:
-            dlab.fab.append_result("Failed creating image.", str(err))
-            dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+            datalab.fab.append_result("Failed creating image.", str(err))
+            datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
             sys.exit(1)
 
     try:
         # generating output information
-        ip_address = dlab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
+        ip_address = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
                                                            notebook_config['instance_name']).get('Private')
-        dns_name = dlab.meta_lib.get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
+        dns_name = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
         rstudio_ip_url = "http://" + ip_address + ":8787/"
         rstudio_dns_url = "http://" + dns_name + ":8787/"
         rstudio_notebook_access_url = "https://{}/{}/".format(notebook_config['edge_instance_hostname'],
@@ -306,7 +306,7 @@ if __name__ == "__main__":
         print("Instance name: {}".format(notebook_config['instance_name']))
         print("Private DNS: {}".format(dns_name))
         print("Private IP: {}".format(ip_address))
-        print("Instance ID: {}".format(dlab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                           notebook_config['instance_name'])))
         print("Instance type: {}".format(notebook_config['instance_type']))
         print("Key name: {}".format(notebook_config['key_name']))
@@ -316,18 +316,18 @@ if __name__ == "__main__":
         print("SG name: {}".format(notebook_config['security_group_name']))
         print("Rstudio URL: {}".format(rstudio_ip_url))
         print("Rstudio URL: {}".format(rstudio_dns_url))
-        print("Rstudio user: {}".format(notebook_config['dlab_ssh_user']))
+        print("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
         print("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
         print("Ungit URL: {}".format(ungit_ip_url))
         print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
-              format(notebook_config['key_name'], notebook_config['dlab_ssh_user'], ip_address))
+              format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
         print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
-              format(notebook_config['key_name'], notebook_config['dlab_ssh_user'], dns_name))
+              format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], dns_name))
 
         with open("/root/result.json", 'w') as result:
             res = {"hostname": dns_name,
                    "ip": ip_address,
-                   "instance_id": dlab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+                   "instance_id": datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                      notebook_config['instance_name']),
                    "master_keyname": os.environ['conf_key_name'],
                    "notebook_name": notebook_config['instance_name'],
@@ -343,10 +343,10 @@ if __name__ == "__main__":
                        #{"description": "Ungit (via tunnel)",
                        # "url": ungit_ip_url}
                    ],
-                   "exploratory_user": notebook_config['dlab_ssh_user'],
+                   "exploratory_user": notebook_config['datalab_ssh_user'],
                    "exploratory_pass": notebook_config['rstudio_pass']}
             result.write(json.dumps(res))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results.", str(err))
-        dlab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
+        datalab.fab.append_result("Error with writing results.", str(err))
+        datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
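
The rstudio_configure hunks above all funnel failures through the same epilogue: record the error with datalab.fab.append_result(), tear down the half-built instance with datalab.actions_lib.remove_ec2(), and exit non-zero. A minimal sketch of that convention, using only the two datalab helpers this patch renames (the run_or_teardown wrapper itself is hypothetical shorthand for the repeated try/except blocks):

    import sys

    import datalab.actions_lib
    import datalab.fab

    def run_or_teardown(step, error_message, notebook_config):
        """Run one provisioning step; on failure, log it and remove the node."""
        try:
            step()  # e.g. a callable wrapping one of the local(...) calls above
        except Exception as err:
            # the same three-line epilogue the script repeats after each step
            datalab.fab.append_result(error_message, str(err))
            datalab.actions_lib.remove_ec2(notebook_config['tag_name'],
                                           notebook_config['instance_name'])
            sys.exit(1)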
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py
index f82c0f0..443d147 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py
@@ -31,10 +31,10 @@ import time
 from fabric.api import lcd
 from fabric.contrib.files import exists
 from fabvenv import virtualenv
-from dlab.notebook_lib import *
-from dlab.actions_lib import *
-from dlab.fab import *
-from dlab.common_lib import *
+from datalab.notebook_lib import *
+from datalab.actions_lib import *
+from datalab.fab import *
+from datalab.common_lib import *
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--bucket', type=str, default='')
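
For most files the rename is exactly what this hunk shows: the dlab.* packages are re-imported under their datalab.* names and the code is otherwise untouched. Any out-of-tree script still importing the old names would now fail at import time; one hypothetical bridge (not part of this commit) is a sys.modules alias shim loaded before the legacy imports:

    # compatibility shim -- illustrative only, not shipped in this patch
    import importlib
    import sys

    _RENAMED = ['datalab', 'datalab.fab', 'datalab.common_lib',
                'datalab.actions_lib', 'datalab.meta_lib',
                'datalab.notebook_lib', 'datalab.ssn_lib']

    for _new in _RENAMED:
        _old = _new.replace('datalab', 'dlab', 1)
        try:
            # after this, "import dlab.fab" resolves to the renamed module
            sys.modules[_old] = importlib.import_module(_new)
        except ImportError:
            pass  # package absent on this node; nothing to alias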
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py
index 4393b21..da38168 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py
@@ -24,7 +24,7 @@
 import argparse
 from fabric.api import *
 import boto3
-from dlab.meta_lib import *
+from datalab.meta_lib import *
 import os
 
 parser = argparse.ArgumentParser()
@@ -52,13 +52,13 @@ def configure_notebook(args):
         '/tmp/rstudio_dataengine-service_create_configs.py')
     sudo('\cp /tmp/rstudio_dataengine-service_create_configs.py /usr/local/bin/rstudio_dataengine-service_create_configs.py')
     sudo('chmod 755 /usr/local/bin/rstudio_dataengine-service_create_configs.py')
-    sudo('mkdir -p /usr/lib/python2.7/dlab/')
-    run('mkdir -p /tmp/dlab_libs/')
-    local('scp -i {} /usr/lib/python2.7/dlab/* {}:/tmp/dlab_libs/'.format(args.keyfile, env.host_string))
-    run('chmod a+x /tmp/dlab_libs/*')
-    sudo('mv /tmp/dlab_libs/* /usr/lib/python2.7/dlab/')
+    sudo('mkdir -p /usr/lib/python2.7/datalab/')
+    run('mkdir -p /tmp/datalab_libs/')
+    local('scp -i {} /usr/lib/python2.7/datalab/* {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string))
+    run('chmod a+x /tmp/datalab_libs/*')
+    sudo('mv /tmp/datalab_libs/* /usr/lib/python2.7/datalab/')
     if exists('/usr/lib64'):
-        sudo('ln -fs /usr/lib/python2.7/dlab /usr/lib64/python2.7/dlab')
+        sudo('ln -fs /usr/lib/python2.7/datalab /usr/lib64/python2.7/datalab')
 
 
 if __name__ == "__main__":
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_associate_elastic_ip.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_associate_elastic_ip.py
index 55bb37d..3963864 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_associate_elastic_ip.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_associate_elastic_ip.py
@@ -22,9 +22,9 @@
 # ******************************************************************************
 
 import sys
-from dlab.fab import *
-from dlab.meta_lib import *
-from dlab.actions_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
+from datalab.actions_lib import *
 import os
 import argparse
 
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py
index 759c417..be9326e 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py
@@ -25,10 +25,10 @@ import logging
 import sys
 import os
 from fabric.api import *
-import dlab.ssn_lib
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.ssn_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import traceback
 import json
 
@@ -44,35 +44,35 @@ if __name__ == "__main__":
 
     def clear_resources():
         if ssn_conf['domain_created']:
-            dlab.actions_lib.remove_route_53_record(os.environ['ssn_hosted_zone_id'],
+            datalab.actions_lib.remove_route_53_record(os.environ['ssn_hosted_zone_id'],
                                                     os.environ['ssn_hosted_zone_name'],
                                                     os.environ['ssn_subdomain'])
-        dlab.actions_lib.remove_ec2(ssn_conf['tag_name'], ssn_conf['instance_name'])
-        dlab.actions_lib.remove_all_iam_resources(ssn_conf['instance'])
-        dlab.actions_lib.remove_s3(ssn_conf['instance'])
+        datalab.actions_lib.remove_ec2(ssn_conf['tag_name'], ssn_conf['instance_name'])
+        datalab.actions_lib.remove_all_iam_resources(ssn_conf['instance'])
+        datalab.actions_lib.remove_s3(ssn_conf['instance'])
         if ssn_conf['pre_defined_sg']:
-            dlab.actions_lib.remove_sgroups(ssn_conf['tag_name'])
+            datalab.actions_lib.remove_sgroups(ssn_conf['tag_name'])
         if ssn_conf['pre_defined_subnet']:
-            dlab.actions_lib.remove_internet_gateways(os.environ['aws_vpc_id'], ssn_conf['tag_name'],
+            datalab.actions_lib.remove_internet_gateways(os.environ['aws_vpc_id'], ssn_conf['tag_name'],
                                                       ssn_conf['service_base_name'])
-            dlab.actions_lib.remove_subnets(ssn_conf['subnet_name'])
+            datalab.actions_lib.remove_subnets(ssn_conf['subnet_name'])
         if ssn_conf['pre_defined_vpc']:
-            dlab.actions_lib.remove_vpc_endpoints(os.environ['aws_vpc_id'])
-            dlab.actions_lib.remove_route_tables(ssn_conf['tag_name'], True)
-            dlab.actions_lib.remove_vpc(os.environ['aws_vpc_id'])
+            datalab.actions_lib.remove_vpc_endpoints(os.environ['aws_vpc_id'])
+            datalab.actions_lib.remove_route_tables(ssn_conf['tag_name'], True)
+            datalab.actions_lib.remove_vpc(os.environ['aws_vpc_id'])
         if ssn_conf['pre_defined_vpc2']:
-            dlab.actions_lib.remove_peering('*')
+            datalab.actions_lib.remove_peering('*')
             try:
-                dlab.actions_lib.remove_vpc_endpoints(os.environ['aws_vpc2_id'])
+                datalab.actions_lib.remove_vpc_endpoints(os.environ['aws_vpc2_id'])
             except:
                 print("There are no VPC Endpoints")
-            dlab.actions_lib.remove_route_tables(ssn_conf['tag2_name'], True)
-            dlab.actions_lib.remove_vpc(os.environ['aws_vpc2_id'])
+            datalab.actions_lib.remove_route_tables(ssn_conf['tag2_name'], True)
+            datalab.actions_lib.remove_vpc(os.environ['aws_vpc2_id'])
 
     try:
         logging.info('[DERIVING NAMES]')
         print('[DERIVING NAMES]')
-        ssn_conf['service_base_name'] = os.environ['conf_service_base_name'] = dlab.fab.replace_multi_symbols(
+        ssn_conf['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
             os.environ['conf_service_base_name'][:20], '-', True)
         if 'ssn_hosted_zone_id' in os.environ and 'ssn_hosted_zone_name' in os.environ and \
                 'ssn_subdomain' in os.environ:
@@ -95,42 +95,42 @@ if __name__ == "__main__":
         ssn_conf['subnet_name'] = '{}-subnet'.format(ssn_conf['service_base_name'])
         ssn_conf['sg_name'] = '{}-ssn-sg'.format(ssn_conf['service_base_name'])
         ssn_conf['network_type'] = os.environ['conf_network_type']
-        ssn_conf['dlab_ssh_user'] = os.environ['conf_os_user']
+        ssn_conf['datalab_ssh_user'] = os.environ['conf_os_user']
 
         try:
             if os.environ['aws_vpc_id'] == '':
                 raise KeyError
         except KeyError:
             ssn_conf['tag'] = {"Key": ssn_conf['tag_name'], "Value": "{}-subnet".format(ssn_conf['service_base_name'])}
-            os.environ['aws_vpc_id'] = dlab.meta_lib.get_vpc_by_tag(ssn_conf['tag_name'], ssn_conf['service_base_name'])
+            os.environ['aws_vpc_id'] = datalab.meta_lib.get_vpc_by_tag(ssn_conf['tag_name'], ssn_conf['service_base_name'])
             ssn_conf['pre_defined_vpc'] = True
         try:
             if os.environ['aws_subnet_id'] == '':
                 raise KeyError
         except KeyError:
             ssn_conf['tag'] = {"Key": ssn_conf['tag_name'], "Value": "{}-subnet".format(ssn_conf['service_base_name'])}
-            os.environ['aws_subnet_id'] = dlab.meta_lib.get_subnet_by_tag(ssn_conf['tag'], True)
+            os.environ['aws_subnet_id'] = datalab.meta_lib.get_subnet_by_tag(ssn_conf['tag'], True)
             ssn_conf['pre_defined_subnet'] = True
         try:
             if os.environ['conf_duo_vpc_enable'] == 'true' and not os.environ['aws_vpc2_id']:
                 raise KeyError
         except KeyError:
             ssn_conf['tag'] = {"Key": ssn_conf['tag2_name'], "Value": "{}-subnet".format(ssn_conf['service_base_name'])}
-            os.environ['aws_vpc2_id'] = dlab.meta_lib.get_vpc_by_tag(ssn_conf['tag2_name'],
+            os.environ['aws_vpc2_id'] = datalab.meta_lib.get_vpc_by_tag(ssn_conf['tag2_name'],
                                                                      ssn_conf['service_base_name'])
             ssn_conf['pre_defined_vpc2'] = True
         try:
             if os.environ['conf_duo_vpc_enable'] == 'true' and not os.environ['aws_peering_id']:
                 raise KeyError
         except KeyError:
-            os.environ['aws_peering_id'] = dlab.meta_lib.get_peering_by_tag(ssn_conf['tag_name'],
+            os.environ['aws_peering_id'] = datalab.meta_lib.get_peering_by_tag(ssn_conf['tag_name'],
                                                                             ssn_conf['service_base_name'])
             ssn_conf['pre_defined_peering'] = True
         try:
             if os.environ['aws_security_groups_ids'] == '':
                 raise KeyError
         except KeyError:
-            os.environ['aws_security_groups_ids'] = dlab.meta_lib.get_security_group_by_name(ssn_conf['sg_name'])
+            os.environ['aws_security_groups_ids'] = datalab.meta_lib.get_security_group_by_name(ssn_conf['sg_name'])
             ssn_conf['pre_defined_sg'] = True
         try:
             if os.environ['aws_account_id'] == '':
@@ -148,7 +148,7 @@ if __name__ == "__main__":
         except KeyError:
             os.environ['aws_report_path'] = ''
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -161,28 +161,28 @@ if __name__ == "__main__":
             ssn_conf['sudo_group'] = 'wheel'
 
         if ssn_conf['network_type'] == 'private':
-            ssn_conf['instance_hostname'] = dlab.meta_lib.get_instance_ip_address(
+            ssn_conf['instance_hostname'] = datalab.meta_lib.get_instance_ip_address(
                 ssn_conf['tag_name'], ssn_conf['instance_name']).get('Private')
         else:
-            ssn_conf['instance_hostname'] = dlab.meta_lib.get_instance_hostname(
+            ssn_conf['instance_hostname'] = datalab.meta_lib.get_instance_hostname(
                 ssn_conf['tag_name'], ssn_conf['instance_name'])
 
         if os.environ['conf_stepcerts_enabled'] == 'true':
-            ssn_conf['step_cert_sans'] = ' --san {0} '.format(dlab.meta_lib.get_instance_ip_address(
+            ssn_conf['step_cert_sans'] = ' --san {0} '.format(datalab.meta_lib.get_instance_ip_address(
                 ssn_conf['tag_name'], ssn_conf['instance_name']).get('Private'))
             if ssn_conf['network_type'] == 'public':
                 ssn_conf['step_cert_sans'] += ' --san {0} --san {1}'.format(
-                    dlab.meta_lib.get_instance_hostname(ssn_conf['tag_name'], ssn_conf['instance_name']),
-                    dlab.meta_lib.get_instance_ip_address(ssn_conf['tag_name'],
+                    datalab.meta_lib.get_instance_hostname(ssn_conf['tag_name'], ssn_conf['instance_name']),
+                    datalab.meta_lib.get_instance_ip_address(ssn_conf['tag_name'],
                                                           ssn_conf['instance_name']).get('Public'))
         else:
             ssn_conf['step_cert_sans'] = ''
 
-        logging.info('[CREATING DLAB SSH USER]')
-        print('[CREATING DLAB SSH USER]')
+        logging.info('[CREATING DATA LAB SSH USER]')
+        print('[CREATING DATA LAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             ssn_conf['instance_hostname'], os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
-            ssn_conf['initial_user'], ssn_conf['dlab_ssh_user'], ssn_conf['sudo_group'])
+            ssn_conf['initial_user'], ssn_conf['datalab_ssh_user'], ssn_conf['sudo_group'])
 
         try:
             local("~/scripts/{}.py {}".format('create_ssh_user', params))
@@ -190,7 +190,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed creating ssh user 'dlab'.", str(err))
+        datalab.fab.append_result("Failed creating ssh user 'datalab'.", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -200,7 +200,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --pip_packages 'boto3 backoff argparse fabric==1.14.0 awscli pymongo " \
                  "pyyaml jinja2' --user {} --region {}". \
             format(ssn_conf['instance_hostname'], os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
-                   ssn_conf['dlab_ssh_user'], os.environ['aws_region'])
+                   ssn_conf['datalab_ssh_user'], os.environ['aws_region'])
 
         try:
             local("~/scripts/{}.py {}".format('install_prerequisites', params))
@@ -208,7 +208,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed installing software: pip, packages.", str(err))
+        datalab.fab.append_result("Failed installing software: pip, packages.", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -220,11 +220,11 @@ if __name__ == "__main__":
                              "security_group_id": os.environ['aws_security_groups_ids'],
                              "vpc_id": os.environ['aws_vpc_id'], "subnet_id": os.environ['aws_subnet_id'],
                              "admin_key": os.environ['conf_key_name']}
-        params = "--hostname {} --keyfile {} --additional_config '{}' --os_user {} --dlab_path {} " \
+        params = "--hostname {} --keyfile {} --additional_config '{}' --os_user {} --datalab_path {} " \
                  "--tag_resource_id {} --step_cert_sans '{}' ".format(
                   ssn_conf['instance_hostname'],
                   "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
-                  json.dumps(additional_config), ssn_conf['dlab_ssh_user'], os.environ['ssn_dlab_path'],
+                  json.dumps(additional_config), ssn_conf['datalab_ssh_user'], os.environ['ssn_datalab_path'],
                   os.environ['conf_tag_resource_id'], ssn_conf['step_cert_sans'])
 
         try:
@@ -233,7 +233,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Failed configuring ssn.", str(err))
+        datalab.fab.append_result("Failed configuring ssn.", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -252,12 +252,12 @@ if __name__ == "__main__":
                              {"name": "deeplearning", "tag": "latest"},
                              {"name": "dataengine-service", "tag": "latest"},
                              {"name": "dataengine", "tag": "latest"}]
-        params = "--hostname {} --keyfile {} --additional_config '{}' --os_family {} --os_user {} --dlab_path {} " \
+        params = "--hostname {} --keyfile {} --additional_config '{}' --os_family {} --os_user {} --datalab_path {} " \
                  "--cloud_provider {} --region {}".format(ssn_conf['instance_hostname'],
                                                           "{}{}.pem".format(os.environ['conf_key_dir'],
                                                                             os.environ['conf_key_name']),
                                                           json.dumps(additional_config), os.environ['conf_os_family'],
-                                                          ssn_conf['dlab_ssh_user'], os.environ['ssn_dlab_path'],
+                                                          ssn_conf['datalab_ssh_user'], os.environ['ssn_datalab_path'],
                                                           os.environ['conf_cloud_provider'], os.environ['aws_region'])
 
         try:
@@ -266,7 +266,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Unable to configure docker.", str(err))
+        datalab.fab.append_result("Unable to configure docker.", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -274,7 +274,7 @@ if __name__ == "__main__":
         cloud_params = [
             {
                 'key': 'KEYCLOAK_REDIRECT_URI',
-                'value': "https://{0}/".format(dlab.meta_lib.get_instance_hostname(ssn_conf['tag_name'],
+                'value': "https://{0}/".format(datalab.meta_lib.get_instance_hostname(ssn_conf['tag_name'],
                                                                                    ssn_conf['instance_name']))
             },
             {
@@ -557,7 +557,7 @@ if __name__ == "__main__":
         print('[CONFIGURE SSN INSTANCE UI]')
         params = "--hostname {} " \
                  "--keyfile {} " \
-                 "--dlab_path {} " \
+                 "--datalab_path {} " \
                  "--os_user {} " \
                  "--os_family {} " \
                  "--request_id {} " \
@@ -572,7 +572,7 @@ if __name__ == "__main__":
                  "--report_path '{}' " \
                  "--billing_enabled {} " \
                  "--cloud_params '{}' " \
-                 "--dlab_id '{}' " \
+                 "--datalab_id '{}' " \
                  "--usage_date {} " \
                  "--product {} " \
                  "--usage_type {} " \
@@ -586,8 +586,8 @@ if __name__ == "__main__":
                  "--keycloak_auth_server_url {}". \
             format(ssn_conf['instance_hostname'],
                    "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
-                   os.environ['ssn_dlab_path'],
-                   ssn_conf['dlab_ssh_user'],
+                   os.environ['ssn_datalab_path'],
+                   ssn_conf['datalab_ssh_user'],
                    os.environ['conf_os_family'],
                    os.environ['request_id'],
                    os.environ['conf_resource'],
@@ -601,7 +601,7 @@ if __name__ == "__main__":
                    os.environ['aws_report_path'],
                    ssn_conf['billing_enabled'],
                    json.dumps(cloud_params),
-                   os.environ['dlab_id'],
+                   os.environ['datalab_id'],
                    os.environ['usage_date'],
                    os.environ['product'],
                    os.environ['usage_type'],
@@ -619,7 +619,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        dlab.fab.append_result("Unable to configure UI.", str(err))
+        datalab.fab.append_result("Unable to configure UI.", str(err))
         clear_resources()
         sys.exit(1)
 
@@ -639,15 +639,15 @@ if __name__ == "__main__":
         print("SSN instance shape: {}".format(os.environ['aws_ssn_instance_size']))
         print("SSN AMI name: {}".format(ssn_conf['ssn_image_name']))
         print("Region: {}".format(ssn_conf['region']))
-        ssn_conf['jenkins_url'] = "http://{}/jenkins".format(dlab.meta_lib.get_instance_hostname(
+        ssn_conf['jenkins_url'] = "http://{}/jenkins".format(datalab.meta_lib.get_instance_hostname(
             ssn_conf['tag_name'], ssn_conf['instance_name']))
-        ssn_conf['jenkins_url_https'] = "https://{}/jenkins".format(dlab.meta_lib.get_instance_hostname(
+        ssn_conf['jenkins_url_https'] = "https://{}/jenkins".format(datalab.meta_lib.get_instance_hostname(
             ssn_conf['tag_name'], ssn_conf['instance_name']))
         print("Jenkins URL: {}".format(ssn_conf['jenkins_url']))
         print("Jenkins URL HTTPS: {}".format(ssn_conf['jenkins_url_https']))
-        print("DLab UI HTTP URL: http://{}".format(dlab.meta_lib.get_instance_hostname(
+        print("Data Lab UI HTTP URL: http://{}".format(datalab.meta_lib.get_instance_hostname(
             ssn_conf['tag_name'], ssn_conf['instance_name'])))
-        print("DLab UI HTTPS URL: https://{}".format(dlab.meta_lib.get_instance_hostname(
+        print("Data Lab UI HTTPS URL: https://{}".format(datalab.meta_lib.get_instance_hostname(
             ssn_conf['tag_name'], ssn_conf['instance_name'])))
         try:
             with open('jenkins_creds.txt') as f:
@@ -658,7 +658,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as f:
             res = {"service_base_name": ssn_conf['service_base_name'],
                    "instance_name": ssn_conf['instance_name'],
-                   "instance_hostname": dlab.meta_lib.get_instance_hostname(ssn_conf['tag_name'],
+                   "instance_hostname": datalab.meta_lib.get_instance_hostname(ssn_conf['tag_name'],
                                                                             ssn_conf['instance_name']),
                    "role_name": ssn_conf['role_name'],
                    "role_profile_name": ssn_conf['role_profile_name'],
@@ -674,7 +674,7 @@ if __name__ == "__main__":
 
         print('Upload response file')
         params = "--instance_name {} --local_log_filepath {} --os_user {} --instance_hostname {}".\
-            format(ssn_conf['instance_name'], local_log_filepath, ssn_conf['dlab_ssh_user'],
+            format(ssn_conf['instance_name'], local_log_filepath, ssn_conf['datalab_ssh_user'],
                    ssn_conf['instance_hostname'])
         local("~/scripts/{}.py {}".format('upload_response_file', params))
 
@@ -685,6 +685,6 @@ if __name__ == "__main__":
             params += "--key_id {}".format(os.environ['aws_access_key'])
             local("~/scripts/{}.py {}".format('ssn_finalize', params))
     except Exception as err:
-        dlab.fab.append_result("Error with writing results.", str(err))
+        datalab.fab.append_result("Error with writing results.", str(err))
         clear_resources()
         sys.exit(1)
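
Every phase of ssn_configure.py has the same shape: build a params string, shell out to a helper under ~/scripts/ with fabric.api.local(), and on failure print the traceback, call clear_resources(), and exit 1. A condensed sketch of that step-runner convention (run_step is our shorthand, not a helper in the repo):

    import sys
    import traceback

    from fabric.api import local

    def run_step(script, params, on_failure):
        """Invoke one ~/scripts helper; any failure tears the SSN down."""
        try:
            local("~/scripts/{}.py {}".format(script, params))
        except (Exception, SystemExit):
            # Fabric 1.x local() aborts via SystemExit on a nonzero exit code
            traceback.print_exc()
            on_failure()  # e.g. the clear_resources() defined at the top
            sys.exit(1)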
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_create_endpoint.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_create_endpoint.py
index f44c6d9..7d751be 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_create_endpoint.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_create_endpoint.py
@@ -22,11 +22,11 @@
 # ******************************************************************************
 
 import argparse
-from dlab.actions_lib import *
-from dlab.meta_lib import *
+from datalab.actions_lib import *
+from datalab.meta_lib import *
 import sys
 import boto3, botocore
-from dlab.ssn_lib import *
+from datalab.ssn_lib import *
 import time
 
 parser = argparse.ArgumentParser()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_create_vpc.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_create_vpc.py
index 3ab99dd..fa3348e 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_create_vpc.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_create_vpc.py
@@ -22,8 +22,8 @@
 # ******************************************************************************
 
 import argparse
-from dlab.actions_lib import *
-from dlab.meta_lib import *
+from datalab.actions_lib import *
+from datalab.meta_lib import *
 
 
 parser = argparse.ArgumentParser()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_finalize.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_finalize.py
index ee6ac9d..0586714 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_finalize.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_finalize.py
@@ -24,7 +24,7 @@
 import boto3
 import argparse
 import sys
-from dlab.ssn_lib import *
+from datalab.ssn_lib import *
 
 
 parser = argparse.ArgumentParser()
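
Note the two import styles this rename has to cover: ssn_finalize.py (like the rstudio scripts) star-imports, so the renamed symbols land in its namespace unqualified, while ssn_configure.py and ssn_prepare.py import the modules explicitly and qualify every call. The qualified style is what keeps a mechanical dlab -> datalab rename reviewable, since each call site carries the package name; a small illustration using a helper this patch itself touches:

    import datalab.meta_lib

    def ssn_hostname(ssn_conf):
        # the qualified call makes the rename visible (and greppable) here
        return datalab.meta_lib.get_instance_hostname(ssn_conf['tag_name'],
                                                      ssn_conf['instance_name'])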
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py
index 45c65f2..109aca8 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py
@@ -25,10 +25,10 @@ import logging
 import sys
 import os
 from fabric.api import *
-import dlab.ssn_lib
-import dlab.fab
-import dlab.actions_lib
-import dlab.meta_lib
+import datalab.ssn_lib
+import datalab.fab
+import datalab.actions_lib
+import datalab.meta_lib
 import traceback
 import json
 
@@ -49,19 +49,19 @@ if __name__ == "__main__":
         logging.info('[CREATE AWS CONFIG FILE]')
         print('[CREATE AWS CONFIG FILE]')
         if 'aws_access_key' in os.environ and 'aws_secret_access_key' in os.environ:
-            dlab.actions_lib.create_aws_config_files(generate_full_config=True)
+            datalab.actions_lib.create_aws_config_files(generate_full_config=True)
         else:
-            dlab.actions_lib.create_aws_config_files()
+            datalab.actions_lib.create_aws_config_files()
     except Exception as err:
         logging.info('Unable to create configuration')
-        dlab.fab.append_result("Unable to create configuration", err)
+        datalab.fab.append_result("Unable to create configuration", err)
         traceback.print_exc()
         sys.exit(1)
 
     try:
         logging.info('[DERIVING NAMES]')
         print('[DERIVING NAMES]')
-        ssn_conf['service_base_name'] = os.environ['conf_service_base_name'] = dlab.fab.replace_multi_symbols(
+        ssn_conf['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
             os.environ['conf_service_base_name'][:20], '-', True)
         ssn_conf['role_name'] = '{}-ssn-role'.format(ssn_conf['service_base_name'])
         ssn_conf['role_profile_name'] = '{}-ssn-profile'.format(ssn_conf['service_base_name'])
@@ -73,7 +73,7 @@ if __name__ == "__main__":
         ssn_conf['region'] = os.environ['aws_region']
         ssn_conf['zone_full'] = os.environ['aws_region'] + os.environ['aws_zone']
         ssn_conf['ssn_image_name'] = os.environ['aws_{}_image_name'.format(os.environ['conf_os_family'])]
-        ssn_conf['ssn_ami_id'] = dlab.meta_lib.get_ami_id(ssn_conf['ssn_image_name'])
+        ssn_conf['ssn_ami_id'] = datalab.meta_lib.get_ami_id(ssn_conf['ssn_image_name'])
         ssn_conf['policy_path'] = '/root/files/ssn_policy.json'
         ssn_conf['vpc_cidr'] = os.environ['conf_vpc_cidr']
         ssn_conf['vpc2_cidr'] = os.environ['conf_vpc2_cidr']
@@ -88,10 +88,10 @@ if __name__ == "__main__":
         ssn_conf['all_ip_cidr'] = '0.0.0.0/0'
         ssn_conf['elastic_ip_name'] = '{0}-ssn-static-ip'.format(ssn_conf['service_base_name'])
     except Exception as err:
-        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
+        datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
 
-    if dlab.meta_lib.get_instance_by_name(ssn_conf['tag_name'], ssn_conf['instance_name']):
+    if datalab.meta_lib.get_instance_by_name(ssn_conf['tag_name'], ssn_conf['instance_name']):
         print("Service base name should be unique and less or equal 20 symbols. Please try again.")
         sys.exit(1)
 
@@ -111,14 +111,14 @@ if __name__ == "__main__":
             except:
                 traceback.print_exc()
                 raise Exception
-            os.environ['aws_vpc_id'] = dlab.meta_lib.get_vpc_by_tag(ssn_conf['tag_name'],
+            os.environ['aws_vpc_id'] = datalab.meta_lib.get_vpc_by_tag(ssn_conf['tag_name'],
                                                                     ssn_conf['service_base_name'])
         except Exception as err:
-            dlab.fab.append_result("Failed to create VPC", str(err))
+            datalab.fab.append_result("Failed to create VPC", str(err))
             sys.exit(1)
 
     ssn_conf['allowed_vpc_cidr_ip_ranges'] = list()
-    for cidr in dlab.meta_lib.get_vpc_cidr_by_id(os.environ['aws_vpc_id']):
+    for cidr in datalab.meta_lib.get_vpc_cidr_by_id(os.environ['aws_vpc_id']):
         ssn_conf['allowed_vpc_cidr_ip_ranges'].append({"CidrIp": cidr})
 
     try:
@@ -137,15 +137,15 @@ if __name__ == "__main__":
             except:
                 traceback.print_exc()
                 raise Exception
-            os.environ['aws_vpc2_id'] = dlab.meta_lib.get_vpc_by_tag(ssn_conf['tag2_name'],
+            os.environ['aws_vpc2_id'] = datalab.meta_lib.get_vpc_by_tag(ssn_conf['tag2_name'],
                                                                      ssn_conf['service_base_name'])
         except Exception as err:
-            dlab.fab.append_result("Failed to create secondary VPC.", str(err))
+            datalab.fab.append_result("Failed to create secondary VPC.", str(err))
             if ssn_conf['pre_defined_vpc']:
-                dlab.actions_lib.remove_internet_gateways(os.environ['aws_vpc_id'], ssn_conf['tag_name'],
+                datalab.actions_lib.remove_internet_gateways(os.environ['aws_vpc_id'], ssn_conf['tag_name'],
                                                           ssn_conf['service_base_name'])
-                dlab.actions_lib.remove_route_tables(ssn_conf['tag_name'], True)
-                dlab.actions_lib.remove_vpc(os.environ['aws_vpc_id'])
+                datalab.actions_lib.remove_route_tables(ssn_conf['tag_name'], True)
+                datalab.actions_lib.remove_vpc(os.environ['aws_vpc_id'])
             sys.exit(1)
 
     try:
@@ -167,25 +167,25 @@ if __name__ == "__main__":
                 raise Exception
             with open('/tmp/ssn_subnet_id', 'r') as f:
                 os.environ['aws_subnet_id'] = f.read()
-            dlab.actions_lib.enable_auto_assign_ip(os.environ['aws_subnet_id'])
+            datalab.actions_lib.enable_auto_assign_ip(os.environ['aws_subnet_id'])
         except Exception as err:
-            dlab.fab.append_result("Failed to create Subnet.", str(err))
+            datalab.fab.append_result("Failed to create Subnet.", str(err))
             if ssn_conf['pre_defined_vpc']:
-                dlab.actions_lib.remove_internet_gateways(os.environ['aws_vpc_id'], ssn_conf['tag_name'],
+                datalab.actions_lib.remove_internet_gateways(os.environ['aws_vpc_id'], ssn_conf['tag_name'],
                                                           ssn_conf['service_base_name'])
-                dlab.actions_lib.remove_route_tables(ssn_conf['tag_name'], True)
+                datalab.actions_lib.remove_route_tables(ssn_conf['tag_name'], True)
                 try:
-                    dlab.actions_lib.remove_subnets(ssn_conf['subnet_name'])
+                    datalab.actions_lib.remove_subnets(ssn_conf['subnet_name'])
                 except:
                     print("Subnet hasn't been created.")
-                dlab.actions_lib.remove_vpc(os.environ['aws_vpc_id'])
+                datalab.actions_lib.remove_vpc(os.environ['aws_vpc_id'])
             if ssn_conf['pre_defined_vpc2']:
                 try:
-                    dlab.actions_lib.remove_vpc_endpoints(os.environ['aws_vpc2_id'])
+                    datalab.actions_lib.remove_vpc_endpoints(os.environ['aws_vpc2_id'])
                 except:
                     print("There are no VPC Endpoints")
-                dlab.actions_lib.remove_route_tables(ssn_conf['tag2_name'], True)
-                dlab.actions_lib.remove_vpc(os.environ['aws_vpc2_id'])
+                datalab.actions_lib.remove_route_tables(ssn_conf['tag2_name'], True)
+                datalab.actions_lib.remove_vpc(os.environ['aws_vpc2_id'])
             sys.exit(1)
 
     try:
@@ -195,29 +195,29 @@ if __name__ == "__main__":
         try:
             logging.info('[CREATE PEERING CONNECTION]')
             print('[CREATE PEERING CONNECTION]')
-            os.environ['aws_peering_id'] = dlab.actions_lib.create_peering_connection(
+            os.environ['aws_peering_id'] = datalab.actions_lib.create_peering_connection(
                 os.environ['aws_vpc_id'], os.environ['aws_vpc2_id'], ssn_conf['service_base_name'])
             print('PEERING CONNECTION ID:' + os.environ['aws_peering_id'])
-            dlab.actions_lib.create_route_by_id(os.environ['aws_subnet_id'], os.environ['aws_vpc_id'],
+            datalab.actions_lib.create_route_by_id(os.environ['aws_subnet_id'], os.environ['aws_vpc_id'],
                                                 os.environ['aws_peering_id'],
-                                                dlab.meta_lib.get_cidr_by_vpc(os.environ['aws_vpc2_id']))
... 17248 lines suppressed ...

