Posted to commits@dlab.apache.org by om...@apache.org on 2020/03/11 09:52:45 UTC

[incubator-dlab] branch DLAB-1546 updated (1127431 -> df0024a)

This is an automated email from the ASF dual-hosted git repository.

omartushevskyi pushed a change to branch DLAB-1546
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git.


    from 1127431  [DLAB-1546]: added name convention changes
     add 29ba942  [DLAB-1612]: added replacement of uppercase letters and underscores in additional tags
     add da9034c  [DLAB-1612]: changed formatting
     add 30887f7  [DLAB-1612]: changed formatting and removed unnecessary if statement
     add 64c9828  Merge pull request #637 from apache/DLAB-1612
     add c6d35ee  [DLAB-1615]: Added some billing values on UI (#640)
     add 71c38dc  [DLAB-1524] The endpoints field verification (#639)
     add 01927e5  [DLAB-1541] Added project name as a unique part of the exploratory
     add 349897e  [DLAB-1541]: Changed routes on UI
     add 08f1dc3  [DLAB-1541] Fixed bugs with libs and comp resource statuses
     add 6695fe7  [DLAB-1586] Stopping/starting/terminating statuses of notebook conveyed to DLab UI
     add b04499f  [DLAB-1585]: Fixed creation computational resources with the same name for second project
     add 082e5f4  Merge remote-tracking branch 'origin/DLAB-1541' into DLAB-1541
     add 37f9cdf  Merge branch 'DLAB-1541' into develop
     add 6c04b0d  Fixed tests
     add e2aa6f9  [DLAB-384]: Grouped roles (#642)
     new df0024a  Merge branch 'develop' of github.com:apache/incubator-dlab into DLAB-1546

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
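
As a side note on the DLAB-1612 commits above: the GCP prepare scripts
normalize the user-supplied "additional tags" before attaching them as
GCP labels, lowercasing the values and rewriting underscores to hyphens.
A minimal standalone sketch of that normalization, mirroring the logic
visible in the diffs below (the function name parse_additional_tags is
illustrative, not part of the DLab codebase):

    def parse_additional_tags(raw_tags):
        """Turn a stringified Python-2 dict of tags into GCP-safe labels."""
        # Strip the "{u'key': u'value'}" formatting and lowercase everything.
        additional_tags = raw_tags.replace("': u'", ":").replace("', u'", ",") \
            .replace("{u'", "").replace("'}", "").lower()
        labels = {}
        for tag in additional_tags.split(','):
            label_key = tag.split(':')[0]
            # Underscores become hyphens; e-mail values lose their domain part.
            label_value = tag.split(':')[1].replace('_', '-')
            if '@' in label_value:
                label_value = label_value[:label_value.find('@')]
            if label_value != '':
                labels[label_key] = label_value
        return labels

    # Example (hypothetical input):
    # parse_additional_tags("{u'user_tag': u'John_Doe@example.com'}")
    # -> {'user_tag': 'john-doe'}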


Summary of changes:
 ...common_notebook_configure_dataengine-service.py |  10 +-
 .../general/scripts/gcp/common_prepare_notebook.py |  11 +-
 .../scripts/gcp/dataengine-service_prepare.py      |   9 +-
 .../src/general/scripts/gcp/dataengine_prepare.py  |   9 +-
 .../java/com/epam/dlab/dto/StatusEnvBaseDTO.java   |  14 +
 .../handlers/ComputationalCallbackHandler.java     |   5 +-
 .../ComputationalConfigureCallbackHandler.java     |   1 +
 .../handlers/ExploratoryCallbackHandler.java       |   8 +-
 .../handlers/LibInstallCallbackHandler.java        |   5 +-
 .../resources/base/ExploratoryService.java         |   2 +-
 .../core/commands/CommandExecutorMockTest.java     |   2 +-
 .../epam/dlab/backendapi/dao/ComputationalDAO.java | 182 ++--
 .../java/com/epam/dlab/backendapi/dao/EnvDAO.java  |  74 +-
 .../epam/dlab/backendapi/dao/ExploratoryDAO.java   | 158 +---
 .../dlab/backendapi/dao/ExploratoryLibDAO.java     |  60 +-
 .../com/epam/dlab/backendapi/dao/IndexCreator.java |   7 +-
 .../epam/dlab/backendapi/dao/SchedulerJobDAO.java  |  36 +-
 .../epam/dlab/backendapi/dao/UserRoleDaoImpl.java  |   2 +
 .../epam/dlab/backendapi/domain/EndpointDTO.java   |   6 +-
 .../backendapi/resources/EnvironmentResource.java  |  64 +-
 .../backendapi/resources/ExploratoryResource.java  |  29 +-
 .../resources/ImageExploratoryResource.java        |  18 +-
 .../resources/LibExploratoryResource.java          |  31 +-
 .../backendapi/resources/SchedulerJobResource.java |  31 +-
 .../resources/aws/ComputationalResourceAws.java    |  20 +-
 .../azure/ComputationalResourceAzure.java          |  20 +-
 .../callback/CheckInactivityCallback.java          |   5 +-
 .../resources/callback/ComputationalCallback.java  |  15 +-
 .../callback/EnvironmentStatusCallback.java        |   4 +-
 .../resources/callback/ExploratoryCallback.java    |  26 +-
 .../dto/ExploratoryImageCreateFormDTO.java         |  16 +-
 .../resources/dto/LibInstallFormDTO.java           |   3 +
 .../resources/dto/SearchLibsFormDTO.java           |   4 +
 .../dlab/backendapi/resources/dto/UserRoleDto.java |  10 +-
 .../resources/gcp/ComputationalResourceGcp.java    |  20 +-
 .../backendapi/service/ComputationalService.java   |  18 +-
 .../backendapi/service/EnvironmentService.java     |  11 -
 .../backendapi/service/ExploratoryService.java     |  26 +-
 .../service/ImageExploratoryService.java           |  10 +-
 .../dlab/backendapi/service/InactivityService.java |   8 +-
 .../dlab/backendapi/service/LibraryService.java    |  10 +-
 .../backendapi/service/SchedulerJobService.java    | 106 +--
 .../service/impl/ComputationalServiceImpl.java     |  75 +-
 .../service/impl/EnvironmentServiceImpl.java       |  32 +-
 .../service/impl/ExploratoryServiceImpl.java       | 115 +--
 .../service/impl/ImageExploratoryServiceImpl.java  |  10 +-
 .../service/impl/InactivityServiceImpl.java        |   4 +-
 .../service/impl/LibraryServiceImpl.java           |  50 +-
 .../service/impl/ProjectServiceImpl.java           |  15 +-
 .../service/impl/ReuploadKeyServiceImpl.java       |  66 +-
 .../service/impl/SchedulerJobServiceImpl.java      |  81 +-
 .../src/main/resources/mongo/gcp/mongo_roles.json  |  26 +
 .../app/administration/roles/roles.component.html  |  64 +-
 .../app/administration/roles/roles.component.scss  |  15 +-
 .../app/administration/roles/roles.component.ts    |  29 +-
 .../services/dataengineConfiguration.service.ts    |  16 +-
 .../core/services/librariesInstallation.service.ts |  12 +-
 .../src/app/core/services/scheduler.service.ts     |   8 +-
 .../src/app/core/services/userResource.service.ts  |   6 +-
 .../reporting-grid/reporting-grid.component.html   |   9 +-
 .../cluster-details/cluster-details.component.ts   |   4 +-
 .../computational-resources-list.component.ts      |   2 +-
 .../ami-create-dialog.component.ts                 |   3 +-
 .../detail-dialog/detail-dialog.component.ts       |   4 +-
 .../install-libraries.component.ts                 |   4 +-
 .../install-libraries/install-libraries.model.ts   |   9 +-
 .../resources-grid/resources-grid.component.ts     |   8 +-
 .../app/resources/scheduler/scheduler.component.ts |  23 +-
 .../src/app/resources/scheduler/scheduler.model.ts |  12 +-
 .../webapp/src/app/shared/form-controls/index.ts   |   5 +-
 .../multi-level-select-dropdown.component.html}    |  27 +-
 .../multi-level-select-dropdown.component.scss}    |  80 +-
 .../multi-level-select-dropdown.component.ts       | 101 +++
 .../webapp/src/assets/styles/_dialogs.scss         |   2 +-
 .../resources/webapp/src/assets/styles/_theme.scss |   6 +
 .../webapp/src/dictionary/azure.dictionary.ts      |   6 +-
 .../resources/EnvironmentResourceTest.java         | 256 +-----
 .../resources/ExploratoryResourceTest.java         |  57 +-
 .../resources/ImageExploratoryResourceTest.java    |  25 +-
 .../resources/LibExploratoryResourceTest.java      | 602 +++++++------
 .../resources/SchedulerJobResourceTest.java        | 383 ++++----
 .../service/impl/ComputationalServiceImplTest.java | 993 +++++++++++----------
 .../service/impl/EnvironmentServiceImplTest.java   | 124 +--
 .../service/impl/ExploratoryServiceImplTest.java   | 586 ++++++------
 .../impl/ImageExploratoryServiceImplTest.java      |  61 +-
 .../service/impl/LibraryServiceImplTest.java       | 464 +++++-----
 .../service/impl/ReuploadKeyServiceImplTest.java   |  49 +-
 .../service/impl/SchedulerJobServiceImplTest.java  | 250 +++---
 88 files changed, 2830 insertions(+), 3055 deletions(-)
 copy services/self-service/src/main/resources/webapp/src/app/shared/form-controls/{multi-select-dropdown/multi-select-dropdown.component.html => multi-level-select-dropdown/multi-level-select-dropdown.component.html} (57%)
 copy services/self-service/src/main/resources/webapp/src/app/shared/form-controls/{dropdowns.component.scss => multi-level-select-dropdown/multi-level-select-dropdown.component.scss} (79%)
 create mode 100644 services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.ts


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@dlab.apache.org
For additional commands, e-mail: commits-help@dlab.apache.org


[incubator-dlab] 01/01: Merge branch 'develop' of github.com:apache/incubator-dlab into DLAB-1546

Posted by om...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

omartushevskyi pushed a commit to branch DLAB-1546
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git

commit df0024a848363b156616f470b643e93bba9bf27f
Merge: 1127431 e2aa6f9
Author: Oleh Martushevskyi <Ol...@epam.com>
AuthorDate: Wed Mar 11 11:52:27 2020 +0200

    Merge branch 'develop' of github.com:apache/incubator-dlab into DLAB-1546
    
     Conflicts:
    	infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
    	infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
    	infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
    	services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ComputationalServiceImpl.java

 ...common_notebook_configure_dataengine-service.py |  10 +-
 .../general/scripts/gcp/common_prepare_notebook.py |  11 +-
 .../scripts/gcp/dataengine-service_prepare.py      |   9 +-
 .../src/general/scripts/gcp/dataengine_prepare.py  |   9 +-
 .../java/com/epam/dlab/dto/StatusEnvBaseDTO.java   |  14 +
 .../handlers/ComputationalCallbackHandler.java     |   5 +-
 .../ComputationalConfigureCallbackHandler.java     |   1 +
 .../handlers/ExploratoryCallbackHandler.java       |   8 +-
 .../handlers/LibInstallCallbackHandler.java        |   5 +-
 .../resources/base/ExploratoryService.java         |   2 +-
 .../core/commands/CommandExecutorMockTest.java     |   2 +-
 .../epam/dlab/backendapi/dao/ComputationalDAO.java | 182 ++--
 .../java/com/epam/dlab/backendapi/dao/EnvDAO.java  |  74 +-
 .../epam/dlab/backendapi/dao/ExploratoryDAO.java   | 158 +---
 .../dlab/backendapi/dao/ExploratoryLibDAO.java     |  60 +-
 .../com/epam/dlab/backendapi/dao/IndexCreator.java |   7 +-
 .../epam/dlab/backendapi/dao/SchedulerJobDAO.java  |  36 +-
 .../epam/dlab/backendapi/dao/UserRoleDaoImpl.java  |   2 +
 .../epam/dlab/backendapi/domain/EndpointDTO.java   |   6 +-
 .../backendapi/resources/EnvironmentResource.java  |  64 +-
 .../backendapi/resources/ExploratoryResource.java  |  29 +-
 .../resources/ImageExploratoryResource.java        |  18 +-
 .../resources/LibExploratoryResource.java          |  31 +-
 .../backendapi/resources/SchedulerJobResource.java |  31 +-
 .../resources/aws/ComputationalResourceAws.java    |  20 +-
 .../azure/ComputationalResourceAzure.java          |  20 +-
 .../callback/CheckInactivityCallback.java          |   5 +-
 .../resources/callback/ComputationalCallback.java  |  15 +-
 .../callback/EnvironmentStatusCallback.java        |   4 +-
 .../resources/callback/ExploratoryCallback.java    |  26 +-
 .../dto/ExploratoryImageCreateFormDTO.java         |  16 +-
 .../resources/dto/LibInstallFormDTO.java           |   3 +
 .../resources/dto/SearchLibsFormDTO.java           |   4 +
 .../dlab/backendapi/resources/dto/UserRoleDto.java |  10 +-
 .../resources/gcp/ComputationalResourceGcp.java    |  20 +-
 .../backendapi/service/ComputationalService.java   |  18 +-
 .../backendapi/service/EnvironmentService.java     |  11 -
 .../backendapi/service/ExploratoryService.java     |  26 +-
 .../service/ImageExploratoryService.java           |  10 +-
 .../dlab/backendapi/service/InactivityService.java |   8 +-
 .../dlab/backendapi/service/LibraryService.java    |  10 +-
 .../backendapi/service/SchedulerJobService.java    | 106 +--
 .../service/impl/ComputationalServiceImpl.java     |  75 +-
 .../service/impl/EnvironmentServiceImpl.java       |  32 +-
 .../service/impl/ExploratoryServiceImpl.java       | 115 +--
 .../service/impl/ImageExploratoryServiceImpl.java  |  10 +-
 .../service/impl/InactivityServiceImpl.java        |   4 +-
 .../service/impl/LibraryServiceImpl.java           |  50 +-
 .../service/impl/ProjectServiceImpl.java           |  15 +-
 .../service/impl/ReuploadKeyServiceImpl.java       |  66 +-
 .../service/impl/SchedulerJobServiceImpl.java      |  81 +-
 .../src/main/resources/mongo/gcp/mongo_roles.json  |  26 +
 .../app/administration/roles/roles.component.html  |  64 +-
 .../app/administration/roles/roles.component.scss  |  15 +-
 .../app/administration/roles/roles.component.ts    |  29 +-
 .../services/dataengineConfiguration.service.ts    |  16 +-
 .../core/services/librariesInstallation.service.ts |  12 +-
 .../src/app/core/services/scheduler.service.ts     |   8 +-
 .../src/app/core/services/userResource.service.ts  |   6 +-
 .../reporting-grid/reporting-grid.component.html   |   9 +-
 .../cluster-details/cluster-details.component.ts   |   4 +-
 .../computational-resources-list.component.ts      |   2 +-
 .../ami-create-dialog.component.ts                 |   3 +-
 .../detail-dialog/detail-dialog.component.ts       |   4 +-
 .../install-libraries.component.ts                 |   4 +-
 .../install-libraries/install-libraries.model.ts   |   9 +-
 .../resources-grid/resources-grid.component.ts     |   8 +-
 .../app/resources/scheduler/scheduler.component.ts |  23 +-
 .../src/app/resources/scheduler/scheduler.model.ts |  12 +-
 .../webapp/src/app/shared/form-controls/index.ts   |   5 +-
 .../multi-level-select-dropdown.component.html     |  70 ++
 .../multi-level-select-dropdown.component.scss     | 319 +++++++
 .../multi-level-select-dropdown.component.ts       | 101 +++
 .../webapp/src/assets/styles/_dialogs.scss         |   2 +-
 .../resources/webapp/src/assets/styles/_theme.scss |   6 +
 .../webapp/src/dictionary/azure.dictionary.ts      |   6 +-
 .../resources/EnvironmentResourceTest.java         | 256 +-----
 .../resources/ExploratoryResourceTest.java         |  57 +-
 .../resources/ImageExploratoryResourceTest.java    |  25 +-
 .../resources/LibExploratoryResourceTest.java      | 602 +++++++------
 .../resources/SchedulerJobResourceTest.java        | 383 ++++----
 .../service/impl/ComputationalServiceImplTest.java | 993 +++++++++++----------
 .../service/impl/EnvironmentServiceImplTest.java   | 124 +--
 .../service/impl/ExploratoryServiceImplTest.java   | 586 ++++++------
 .../impl/ImageExploratoryServiceImplTest.java      |  61 +-
 .../service/impl/LibraryServiceImplTest.java       | 464 +++++-----
 .../service/impl/ReuploadKeyServiceImplTest.java   |  49 +-
 .../service/impl/SchedulerJobServiceImplTest.java  | 250 +++---
 88 files changed, 3126 insertions(+), 3041 deletions(-)

diff --cc infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
index 9624156,14953fe..c83208b
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
@@@ -40,136 -37,118 +40,135 @@@ if __name__ == "__main__"
      logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                          level=logging.DEBUG,
                          filename=local_log_filepath)
 -
 -    print('Generating infrastructure names and tags')
 -    notebook_config = dict()
 -    notebook_config['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
 -    notebook_config['edge_user_name'] = (os.environ['edge_user_name']).lower().replace('_', '-')
 -    notebook_config['project_name'] = (os.environ['project_name']).lower().replace('_', '-')
 -    notebook_config['project_tag'] = (os.environ['project_name']).lower().replace('_', '-')
 -    notebook_config['endpoint_tag'] = (os.environ['endpoint_name']).lower().replace('_', '-')
 -    notebook_config['region'] = os.environ['gcp_region']
 -    notebook_config['zone'] = os.environ['gcp_zone']
 -
 -    edge_status = GCPMeta().get_instance_status('{0}-{1}-{2}-edge'.format(notebook_config['service_base_name'],
 -                                                                          notebook_config['project_name'],
 -                                                                          notebook_config['endpoint_tag']))
 -    if edge_status != 'RUNNING':
 -        logging.info('ERROR: Edge node is unavailable! Aborting...')
 -        print('ERROR: Edge node is unavailable! Aborting...')
 -        ssn_hostname = GCPMeta().get_private_ip_address(notebook_config['service_base_name'] + '-ssn')
 -        put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'],
 -                            ssn_hostname)
 -        append_result("Edge node is unavailable")
 -        sys.exit(1)
 -
      try:
 -        if os.environ['gcp_vpc_name'] == '':
 -            raise KeyError
 -        else:
 -            notebook_config['vpc_name'] = os.environ['gcp_vpc_name']
 -    except KeyError:
 -        notebook_config['vpc_name'] = '{}-ssn-vpc'.format(notebook_config['service_base_name'])
 -    try:
 -        notebook_config['exploratory_name'] = (os.environ['exploratory_name']).lower().replace('_', '-')
 -    except:
 -        notebook_config['exploratory_name'] = ''
 -    notebook_config['subnet_name'] = '{0}-{1}-subnet'.format(notebook_config['service_base_name'],
 -                                                             notebook_config['project_name'])
 -    notebook_config['instance_size'] = os.environ['gcp_notebook_instance_size']
 -    notebook_config['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
 -    notebook_config['notebook_service_account_name'] = '{}-{}-ps'.format(notebook_config['service_base_name'],
 -                                                                         notebook_config['project_name']).replace('_', '-')
 -
 -    if os.environ['conf_os_family'] == 'debian':
 -        initial_user = 'ubuntu'
 -        sudo_group = 'sudo'
 -    if os.environ['conf_os_family'] == 'redhat':
 -        initial_user = 'ec2-user'
 -        sudo_group = 'wheel'
 -    notebook_config['instance_name'] = '{0}-{1}-{2}-nb-{3}'.format(notebook_config['service_base_name'],
 -                                                               notebook_config['project_name'], os.environ['endpoint_name'],
 -                                                               notebook_config['exploratory_name'])
 -    notebook_config['primary_disk_size'] = (lambda x: '30' if x == 'deeplearning' else '12')(os.environ['application'])
 -    notebook_config['secondary_disk_size'] = os.environ['notebook_disk_size']
 -
 -    notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
 -    if notebook_config['shared_image_enabled'] == 'false':
 -        notebook_config['expected_primary_image_name'] = '{}-{}-{}-{}-primary-image'.format(
 -            notebook_config['service_base_name'], notebook_config['endpoint_tag'], notebook_config['project_name'],
 -            os.environ['application'])
 -        notebook_config['expected_secondary_image_name'] = '{}-{}-{}-{}-secondary-image'.format(
 -            notebook_config['service_base_name'], notebook_config['endpoint_tag'], notebook_config['project_name'],
 -            os.environ['application'])
 -    else:
 -        notebook_config['expected_primary_image_name'] = '{}-{}-{}-primary-image'.format(
 -            notebook_config['service_base_name'], notebook_config['endpoint_tag'], os.environ['application'])
 -        notebook_config['expected_secondary_image_name'] = '{}-{}-{}-secondary-image'.format(
 -            notebook_config['service_base_name'], notebook_config['endpoint_tag'], os.environ['application'])
 -    notebook_config['notebook_primary_image_name'] = (lambda x: os.environ['notebook_primary_image_name'] if x != 'None'
 -        else notebook_config['expected_primary_image_name'])(str(os.environ.get('notebook_primary_image_name')))
 -    print('Searching pre-configured images')
 -    notebook_config['primary_image_name'] = GCPMeta().get_image_by_name(notebook_config['expected_primary_image_name'])
 -    if notebook_config['primary_image_name'] == '':
 -        notebook_config['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
 -    else:
 -        print('Pre-configured primary image found. Using: {}'.format(notebook_config['primary_image_name'].get('name')))
 -        notebook_config['primary_image_name'] = 'global/images/{}'.format(notebook_config['primary_image_name'].get('name'))
 -
 -    notebook_config['secondary_image_name'] = GCPMeta().get_image_by_name(notebook_config['expected_secondary_image_name'])
 -    if notebook_config['secondary_image_name'] == '':
 -        notebook_config['secondary_image_name'] = 'None'
 -    else:
 -        print('Pre-configured secondary image found. Using: {}'.format(notebook_config['secondary_image_name'].get('name')))
 -        notebook_config['secondary_image_name'] = 'global/images/{}'.format(notebook_config['secondary_image_name'].get('name'))
 -
 -    notebook_config['gpu_accelerator_type'] = 'None'
 -
 -    if os.environ['application'] in ('tensor', 'tensor-rstudio', 'deeplearning'):
 -        notebook_config['gpu_accelerator_type'] = os.environ['gcp_gpu_accelerator_type']
 -
 -    notebook_config['network_tag'] = '{0}-{1}-ps'.format(notebook_config['service_base_name'],
 -                                                         notebook_config['project_name'])
 -
 -    with open('/root/result.json', 'w') as f:
 -        data = {"notebook_name": notebook_config['instance_name'], "error": ""}
 -        json.dump(data, f)
 -
 -    additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "" ).replace(
 -        "'}", "").lower()
 -
 -    print('Additional tags will be added: {}'.format(additional_tags))
 -    notebook_config['labels'] = {"name": notebook_config['instance_name'],
 -                                 "sbn": notebook_config['service_base_name'],
 -                                 "product": "dlab"
 -                                 }
 -
 -    for tag in additional_tags.split(','):
 -        label_key = tag.split(':')[0]
 -        label_value = tag.split(':')[1].replace('_', '-')
 -        if '@' in label_value:
 -            label_value = label_value[:label_value.find('@')]
 -        if label_value != '':
 -            notebook_config['labels'].update({label_key: label_value})
 +        GCPMeta = dlab.meta_lib.GCPMeta()
 +        GCPActions = dlab.actions_lib.GCPActions()
 +        print('Generating infrastructure names and tags')
 +        notebook_config = dict()
 +        notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
 +        notebook_config['edge_user_name'] = (os.environ['edge_user_name'])
 +        notebook_config['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
 +        notebook_config['project_tag'] = notebook_config['project_name']
 +        notebook_config['endpoint_name'] = os.environ['endpoint_name'].replace('_', '-').lower()
 +        notebook_config['endpoint_tag'] = notebook_config['endpoint_name']
 +        notebook_config['region'] = os.environ['gcp_region']
 +        notebook_config['zone'] = os.environ['gcp_zone']
 +
 +        edge_status = GCPMeta.get_instance_status('{0}-{1}-{2}-edge'.format(notebook_config['service_base_name'],
 +                                                                            notebook_config['project_name'],
 +                                                                            notebook_config['endpoint_tag']))
 +        if edge_status != 'RUNNING':
 +            logging.info('ERROR: Edge node is unavailable! Aborting...')
 +            print('ERROR: Edge node is unavailable! Aborting...')
 +            ssn_hostname = GCPMeta.get_private_ip_address(notebook_config['service_base_name'] + '-ssn')
 +            dlab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'],
 +                                         ssn_hostname)
 +            dlab.fab.append_result("Edge node is unavailable")
 +            sys.exit(1)
  
 +        try:
 +            if os.environ['gcp_vpc_name'] == '':
 +                raise KeyError
 +            else:
 +                notebook_config['vpc_name'] = os.environ['gcp_vpc_name']
 +        except KeyError:
 +            notebook_config['vpc_name'] = '{}-vpc'.format(notebook_config['service_base_name'])
 +        try:
 +            notebook_config['exploratory_name'] = (os.environ['exploratory_name']).replace('_', '-').lower()
 +        except:
 +            notebook_config['exploratory_name'] = ''
 +        notebook_config['subnet_name'] = '{0}-{1}-{2}-subnet'.format(notebook_config['service_base_name'],
 +                                                                     notebook_config['project_name'],
 +                                                                     notebook_config['endpoint_tag'])
 +        notebook_config['instance_size'] = os.environ['gcp_notebook_instance_size']
 +        notebook_config['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
 +        notebook_config['notebook_service_account_name'] = '{}-{}-{}-ps-sa'.format(notebook_config['service_base_name'],
 +                                                                                   notebook_config['project_name'],
 +                                                                                   notebook_config['endpoint_name'])
 +
 +        if os.environ['conf_os_family'] == 'debian':
 +            notebook_config['initial_user'] = 'ubuntu'
 +            notebook_config['sudo_group'] = 'sudo'
 +        if os.environ['conf_os_family'] == 'redhat':
 +            notebook_config['initial_user'] = 'ec2-user'
 +            notebook_config['sudo_group'] = 'wheel'
 +        notebook_config['instance_name'] = '{0}-{1}-{2}-nb-{3}'.format(notebook_config['service_base_name'],
 +                                                                       notebook_config['project_name'],
 +                                                                       notebook_config['endpoint_name'],
 +                                                                       notebook_config['exploratory_name'])
 +        notebook_config['primary_disk_size'] = (lambda x: '30' if x == 'deeplearning' else '12')(
 +            os.environ['application'])
 +        notebook_config['secondary_disk_size'] = os.environ['notebook_disk_size']
 +
 +        notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
 +        if notebook_config['shared_image_enabled'] == 'false':
 +            notebook_config['expected_primary_image_name'] = '{}-{}-{}-{}-primary-image'.format(
 +                notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_tag'],
 +                os.environ['application'])
 +            notebook_config['expected_secondary_image_name'] = '{}-{}-{}-{}-secondary-image'.format(
 +                notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_tag'],
 +                os.environ['application'])
 +        else:
 +            notebook_config['expected_primary_image_name'] = '{}-{}-{}-primary-image'.format(
 +                notebook_config['service_base_name'], notebook_config['endpoint_name'], os.environ['application'])
 +            notebook_config['expected_secondary_image_name'] = '{}-{}-{}-secondary-image'.format(
 +                notebook_config['service_base_name'], notebook_config['endpoint_name'], os.environ['application'])
 +        notebook_config['notebook_primary_image_name'] = \
 +            (lambda x: os.environ['notebook_primary_image_name'] if x != 'None'
 +             else notebook_config['expected_primary_image_name'])(str(os.environ.get('notebook_primary_image_name')))
 +        print('Searching pre-configured images')
 +        notebook_config['primary_image_name'] = GCPMeta.get_image_by_name(
 +            notebook_config['expected_primary_image_name'])
 +        if notebook_config['primary_image_name'] == '':
 +            notebook_config['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
 +        else:
 +            print('Pre-configured primary image found. Using: {}'.format(
 +                notebook_config['primary_image_name'].get('name')))
 +            notebook_config['primary_image_name'] = 'global/images/{}'.format(
 +                notebook_config['primary_image_name'].get('name'))
 +
 +        notebook_config['secondary_image_name'] = GCPMeta.get_image_by_name(
 +            notebook_config['expected_secondary_image_name'])
 +        if notebook_config['secondary_image_name'] == '':
 +            notebook_config['secondary_image_name'] = 'None'
 +        else:
 +            print('Pre-configured secondary image found. Using: {}'.format(
 +                notebook_config['secondary_image_name'].get('name')))
 +            notebook_config['secondary_image_name'] = 'global/images/{}'.format(
 +                notebook_config['secondary_image_name'].get('name'))
 +
 +        notebook_config['gpu_accelerator_type'] = 'None'
 +
 +        if os.environ['application'] in ('tensor', 'tensor-rstudio', 'deeplearning'):
 +            notebook_config['gpu_accelerator_type'] = os.environ['gcp_gpu_accelerator_type']
 +
 +        notebook_config['network_tag'] = '{0}-{1}-{2}-ps'.format(notebook_config['service_base_name'],
 +                                                                 notebook_config['project_name'],
 +                                                                 notebook_config['endpoint_name'])
 +
 +        with open('/root/result.json', 'w') as f:
 +            data = {"notebook_name": notebook_config['instance_name'], "error": ""}
 +            json.dump(data, f)
 +
-         additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "").replace(
-             "'}", "")
++        additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "" ).replace(
++            "'}", "").lower()
 +
 +        print('Additional tags will be added: {}'.format(additional_tags))
 +        notebook_config['labels'] = {"name": notebook_config['instance_name'],
 +                                     "sbn": notebook_config['service_base_name'],
 +                                     "product": "dlab"
 +                                     }
 +
 +        for tag in additional_tags.split(','):
 +            label_key = tag.split(':')[0]
-             label_value = tag.split(':')[1]
-             if label_key == 'user_tag':
-                 if '@' in label_value:
-                     label_value = label_value[:label_value.find('@')]
++            label_value = tag.split(':')[1].replace('_', '-')
++            if '@' in label_value:
++                label_value = label_value[:label_value.find('@')]
 +            if label_value != '':
 +                notebook_config['labels'].update({label_key: label_value})
 +    except Exception as err:
 +        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
 +        sys.exit(1)
      # launching instance for notebook server
      try:
          logging.info('[CREATE NOTEBOOK INSTANCE]')
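
A note on the resolution above: besides the tag handling, the merged
version binds one shared GCPMeta/GCPActions client and wraps the whole
variables-dictionary section in a single try/except that reports the
failure via dlab.fab.append_result before exiting. A schematic sketch of
that structure, assuming the repository's dlab provisioning package is
importable (the config body is elided):

    import sys
    import dlab.fab
    import dlab.meta_lib

    try:
        # One shared client instead of constructing GCPMeta() per call.
        GCPMeta = dlab.meta_lib.GCPMeta()
        notebook_config = dict()
        # ... populate notebook_config as in the hunk above ...
    except Exception as err:
        # Record the failure for the status file, then abort.
        dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
        sys.exit(1)
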
diff --cc infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
index 4b2d8c1,dca9a33..993b8e7
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
@@@ -43,81 -42,73 +43,80 @@@ if __name__ == "__main__"
                          level=logging.INFO,
                          filename=local_log_filepath)
      try:
 -        os.environ['exploratory_name']
 -    except:
 -        os.environ['exploratory_name'] = ''
 -    if os.path.exists('/response/.dataproc_creating_{}'.format(os.environ['exploratory_name'])):
 -        time.sleep(30)
 -
 -    print('Generating infrastructure names and tags')
 -    dataproc_conf = dict()
 -    try:
 -        dataproc_conf['exploratory_name'] = (os.environ['exploratory_name']).lower().replace('_', '-')
 -    except:
 -        dataproc_conf['exploratory_name'] = ''
 -    try:
 -        dataproc_conf['computational_name'] = (os.environ['computational_name']).lower().replace('_', '-')
 -    except:
 -        dataproc_conf['computational_name'] = ''
 -    dataproc_conf['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
 -    dataproc_conf['edge_user_name'] = (os.environ['edge_user_name']).lower().replace('_', '-')
 -    dataproc_conf['project_name'] = (os.environ['project_name']).lower().replace('_', '-')
 -    dataproc_conf['project_tag'] = (os.environ['project_name']).lower().replace('_', '-')
 -    dataproc_conf['endpoint_tag'] = (os.environ['endpoint_name']).lower().replace('_', '-')
 -    dataproc_conf['endpoint_name'] = (os.environ['endpoint_name']).lower().replace('_', '-')
 -    dataproc_conf['key_name'] = os.environ['conf_key_name']
 -    dataproc_conf['key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
 -    dataproc_conf['region'] = os.environ['gcp_region']
 -    dataproc_conf['zone'] = os.environ['gcp_zone']
 -    dataproc_conf['subnet'] = '{0}-{1}-subnet'.format(dataproc_conf['service_base_name'], dataproc_conf['project_name'])
 -    dataproc_conf['cluster_name'] = '{0}-{1}-des-{2}-{3}'.format(dataproc_conf['service_base_name'], dataproc_conf['project_name'],
 -                                                                 dataproc_conf['exploratory_name'], dataproc_conf['computational_name'])
 -    dataproc_conf['cluster_tag'] = '{0}-{1}-ps'.format(dataproc_conf['service_base_name'], dataproc_conf['project_name'])
 -    dataproc_conf['bucket_name'] = '{0}-{1}-{2}-bucket'.format(dataproc_conf['service_base_name'],
 +        GCPMeta = dlab.meta_lib.GCPMeta()
 +        GCPActions = dlab.actions_lib.GCPActions()
 +        print('Generating infrastructure names and tags')
 +        dataproc_conf = dict()
 +        if 'exploratory_name' in os.environ:
 +            dataproc_conf['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
 +        else:
 +            dataproc_conf['exploratory_name'] = ''
 +        if 'computational_name' in os.environ:
 +            dataproc_conf['computational_name'] = os.environ['computational_name'].replace('_', '-').lower()
 +        else:
 +            dataproc_conf['computational_name'] = ''
 +        if os.path.exists('/response/.dataproc_creating_{}'.format(dataproc_conf['exploratory_name'])):
 +            time.sleep(30)
 +        dataproc_conf['service_base_name'] = (os.environ['conf_service_base_name'])
 +        dataproc_conf['edge_user_name'] = (os.environ['edge_user_name'])
 +        dataproc_conf['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
 +        dataproc_conf['project_tag'] = dataproc_conf['project_name']
 +        dataproc_conf['endpoint_name'] = (os.environ['endpoint_name']).replace('_', '-').lower()
 +        dataproc_conf['endpoint_tag'] = dataproc_conf['endpoint_name']
 +        dataproc_conf['key_name'] = os.environ['conf_key_name']
 +        dataproc_conf['key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
 +        dataproc_conf['region'] = os.environ['gcp_region']
 +        dataproc_conf['zone'] = os.environ['gcp_zone']
 +        dataproc_conf['subnet'] = '{0}-{1}-{2}-subnet'.format(dataproc_conf['service_base_name'],
 +                                                              dataproc_conf['project_name'],
 +                                                              dataproc_conf['endpoint_name'])
 +        dataproc_conf['cluster_name'] = '{0}-{1}-{2}-des-{3}'.format(dataproc_conf['service_base_name'],
 +                                                                     dataproc_conf['project_name'],
 +                                                                     dataproc_conf['endpoint_name'],
 +                                                                     dataproc_conf['computational_name'])
 +        dataproc_conf['cluster_tag'] = '{0}-{1}-{2}-ps'.format(dataproc_conf['service_base_name'],
                                                                 dataproc_conf['project_name'],
                                                                 dataproc_conf['endpoint_name'])
 -    dataproc_conf['release_label'] = os.environ['dataproc_version']
 -
 -    additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "" ).replace(
 -        "'}", "").lower()
 -
 -    dataproc_conf['cluster_labels'] = {
 -        os.environ['notebook_instance_name']: "not-configured",
 -        "name": dataproc_conf['cluster_name'],
 -        "sbn": dataproc_conf['service_base_name'],
 -        "notebook_name": os.environ['notebook_instance_name'],
 -        "product": "dlab",
 -        "computational_name": dataproc_conf['computational_name']
 -    }
 -
 -    for tag in additional_tags.split(','):
 -        label_key = tag.split(':')[0]
 -        label_value = tag.split(':')[1].replace('_', '-')
 -        if '@' in label_value:
 -            label_value = label_value[:label_value.find('@')]
 -        if label_value != '':
 -            dataproc_conf['cluster_labels'].update({label_key: label_value})
 -
 -    dataproc_conf['dataproc_service_account_name'] = '{0}-{1}-ps'.format(dataproc_conf['service_base_name'],
 -                                                                         dataproc_conf['project_name'])
 -    dataproc_conf['dataproc_unique_index'] = GCPMeta().get_index_by_service_account_name(dataproc_conf['dataproc_service_account_name'])
 -    service_account_email = "{}-{}@{}.iam.gserviceaccount.com".format(dataproc_conf['service_base_name'],
 -                                                                         dataproc_conf['dataproc_unique_index'],
 -                                                                         os.environ['gcp_project_id'])
 -    dataproc_conf['edge_instance_hostname'] = '{0}-{1}-{2}-edge'.format(dataproc_conf['service_base_name'],
 -                                                                        dataproc_conf['project_name'],
 -                                                                        dataproc_conf['endpoint_name'])
 -    dataproc_conf['dlab_ssh_user'] = os.environ['conf_os_user']
 +        dataproc_conf['bucket_name'] = '{0}-{1}-{2}-bucket'.format(dataproc_conf['service_base_name'],
 +                                                                   dataproc_conf['project_name'],
 +                                                                   dataproc_conf['endpoint_name'])
 +        dataproc_conf['release_label'] = os.environ['dataproc_version']
 +        additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "").replace(
-             "'}", "")
++            "'}", "").lower()
 +
 +        dataproc_conf['cluster_labels'] = {
 +            os.environ['notebook_instance_name']: "not-configured",
 +            "name": dataproc_conf['cluster_name'],
 +            "sbn": dataproc_conf['service_base_name'],
 +            "notebook_name": os.environ['notebook_instance_name'],
 +            "product": "dlab",
 +            "computational_name": dataproc_conf['computational_name']
 +        }
 +
 +        for tag in additional_tags.split(','):
 +            label_key = tag.split(':')[0]
-             label_value = tag.split(':')[1]
-             if label_key == 'user_tag':
-                 if '@' in label_value:
-                     label_value = label_value[:label_value.find('@')]
++            label_value = tag.split(':')[1].replace('_', '-')
++            if '@' in label_value:
++                label_value = label_value[:label_value.find('@')]
 +            if label_value != '':
 +                dataproc_conf['cluster_labels'].update({label_key: label_value})
 +        dataproc_conf['dataproc_service_account_name'] = '{0}-{1}-{2}-ps-sa'.format(dataproc_conf['service_base_name'],
 +                                                                                    dataproc_conf['project_name'],
 +                                                                                    dataproc_conf['endpoint_name'])
 +        dataproc_conf['dataproc_unique_index'] = GCPMeta.get_index_by_service_account_name(
 +            dataproc_conf['dataproc_service_account_name'])
 +        service_account_email = "{}-{}@{}.iam.gserviceaccount.com".format(dataproc_conf['service_base_name'],
 +                                                                          dataproc_conf['dataproc_unique_index'],
 +                                                                          os.environ['gcp_project_id'])
 +        dataproc_conf['edge_instance_hostname'] = '{0}-{1}-{2}-edge'.format(dataproc_conf['service_base_name'],
 +                                                                            dataproc_conf['project_name'],
 +                                                                            dataproc_conf['endpoint_name'])
 +        dataproc_conf['dlab_ssh_user'] = os.environ['conf_os_user']
 +    except Exception as err:
 +        dlab.fab.append_result("Failed to generate variables dictionary. Exception:" + str(err))
 +        sys.exit(1)
  
 -    edge_status = GCPMeta().get_instance_status(dataproc_conf['edge_instance_hostname'])
 +    edge_status = GCPMeta.get_instance_status(dataproc_conf['edge_instance_hostname'])
      if edge_status != 'RUNNING':
          logging.info('ERROR: Edge node is unavailable! Aborting...')
          print('ERROR: Edge node is unavailable! Aborting...')
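
Another recurring cleanup in these hunks: optional environment variables
such as exploratory_name and computational_name are now probed with a
membership test instead of a bare try/except around the lookup. A minimal
illustration of the before/after idiom, using names from the diff:

    import os

    # Pre-merge style: a bare except silently swallows any error.
    try:
        exploratory_name = (os.environ['exploratory_name']).lower().replace('_', '-')
    except:
        exploratory_name = ''

    # Post-merge style: explicit membership test, then normalize.
    if 'exploratory_name' in os.environ:
        exploratory_name = os.environ['exploratory_name'].replace('_', '-').lower()
    else:
        exploratory_name = ''
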
diff --cc infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
index 447fb26,dcbb333..262868c
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
@@@ -40,144 -38,134 +40,143 @@@ if __name__ == "__main__"
      logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                          level=logging.DEBUG,
                          filename=local_log_filepath)
 +    try:
 +        GCPMeta = dlab.meta_lib.GCPMeta()
 +        GCPActions = dlab.actions_lib.GCPActions()
 +        print('Generating infrastructure names and tags')
 +        data_engine = dict()
 +        data_engine['service_base_name'] = (os.environ['conf_service_base_name'])
 +        data_engine['edge_user_name'] = (os.environ['edge_user_name'])
 +        data_engine['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
 +        data_engine['project_tag'] = data_engine['project_name']
 +        data_engine['endpoint_name'] = os.environ['endpoint_name'].replace('_', '-').lower()
 +        data_engine['endpoint_tag'] = data_engine['endpoint_name']
 +        data_engine['region'] = os.environ['gcp_region']
 +        data_engine['zone'] = os.environ['gcp_zone']
 +
 +        edge_status = GCPMeta.get_instance_status('{0}-{1}-{2}-edge'.format(data_engine['service_base_name'],
 +                                                                            data_engine['project_name'],
 +                                                                            data_engine['endpoint_name']))
 +        if edge_status != 'RUNNING':
 +            logging.info('ERROR: Edge node is unavailable! Aborting...')
 +            print('ERROR: Edge node is unavailable! Aborting...')
 +            ssn_hostname = GCPMeta.get_private_ip_address(data_engine['service_base_name'] + '-ssn')
 +            dlab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'],
 +                                         ssn_hostname)
 +            dlab.fab.append_result("Edge node is unavailable")
 +            sys.exit(1)
  
 -    print('Generating infrastructure names and tags')
 -    data_engine = dict()
 -    data_engine['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
 -    data_engine['edge_user_name'] = (os.environ['edge_user_name']).lower().replace('_', '-')
 -    data_engine['project_name'] = (os.environ['project_name']).lower().replace('_', '-')
 -    data_engine['project_tag'] = (os.environ['project_name']).lower().replace('_', '-')
 -    data_engine['endpoint_tag'] = (os.environ['endpoint_name']).lower().replace('_', '-')
 -    data_engine['region'] = os.environ['gcp_region']
 -    data_engine['zone'] = os.environ['gcp_zone']
 -    data_engine['endpoint_name'] = os.environ['endpoint_name']
 -
 -    edge_status = GCPMeta().get_instance_status('{0}-{1}-{2}-edge'.format(data_engine['service_base_name'],
 -                                                                          data_engine['project_name'],
 -                                                                          data_engine['endpoint_name']))
 -    if edge_status != 'RUNNING':
 -        logging.info('ERROR: Edge node is unavailable! Aborting...')
 -        print('ERROR: Edge node is unavailable! Aborting...')
 -        ssn_hostname = GCPMeta().get_private_ip_address(data_engine['service_base_name'] + '-ssn')
 -        put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'],
 -                            ssn_hostname)
 -        append_result("Edge node is unavailable")
 -        sys.exit(1)
 +        try:
 +            if os.environ['gcp_vpc_name'] == '':
 +                raise KeyError
 +            else:
 +                data_engine['vpc_name'] = os.environ['gcp_vpc_name']
 +        except KeyError:
 +            data_engine['vpc_name'] = '{}-vpc'.format(data_engine['service_base_name'])
 +        if 'exploratory_name' in os.environ:
 +            data_engine['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
 +        else:
 +            data_engine['exploratory_name'] = ''
 +        if 'computational_name' in os.environ:
 +            data_engine['computational_name'] = os.environ['computational_name'].replace('_', '-').lower()
 +        else:
 +            data_engine['computational_name'] = ''
 +
 +        data_engine['subnet_name'] = '{0}-{1}-{2}-subnet'.format(data_engine['service_base_name'],
 +                                                                 data_engine['project_name'],
 +                                                                 data_engine['endpoint_name'])
 +        data_engine['master_size'] = os.environ['gcp_dataengine_master_size']
 +        data_engine['slave_size'] = os.environ['gcp_dataengine_slave_size']
 +        data_engine['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
 +        data_engine['dataengine_service_account_name'] = '{}-{}-{}-ps-sa'.format(data_engine['service_base_name'],
 +                                                                                 data_engine['project_name'],
 +                                                                                 data_engine['endpoint_name'])
 +
 +        if os.environ['conf_os_family'] == 'debian':
 +            initial_user = 'ubuntu'
 +            sudo_group = 'sudo'
 +        if os.environ['conf_os_family'] == 'redhat':
 +            initial_user = 'ec2-user'
 +            sudo_group = 'wheel'
 +        data_engine['cluster_name'] = "{}-{}-{}-de-{}".format(data_engine['service_base_name'],
 +                                                              data_engine['project_name'],
 +                                                              data_engine['endpoint_name'],
 +                                                              data_engine['computational_name'])
 +        data_engine['master_node_name'] = data_engine['cluster_name'] + '-m'
 +        data_engine['slave_node_name'] = data_engine['cluster_name'] + '-s'
 +        data_engine['instance_count'] = int(os.environ['dataengine_instance_count'])
 +        data_engine['notebook_name'] = os.environ['notebook_instance_name']
 +
 +        data_engine['primary_disk_size'] = '30'
 +        data_engine['secondary_disk_size'] = os.environ['notebook_disk_size']
 +
 +        data_engine['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
 +        if data_engine['shared_image_enabled'] == 'false':
 +            data_engine['expected_primary_image_name'] = '{}-{}-{}-{}-primary-image'.format(
 +                data_engine['service_base_name'], data_engine['project_name'], data_engine['endpoint_tag'],
 +                os.environ['application'])
 +            data_engine['expected_secondary_image_name'] = '{}-{}-{}-{}-secondary-image'.format(
 +                data_engine['service_base_name'], data_engine['project_name'], data_engine['endpoint_tag'],
 +                os.environ['application'])
 +        else:
 +            data_engine['expected_primary_image_name'] = '{}-{}-{}-primary-image'.format(
 +                data_engine['service_base_name'], data_engine['endpoint_tag'], os.environ['application'])
 +            data_engine['expected_secondary_image_name'] = '{}-{}-{}-secondary-image'.format(
 +                data_engine['service_base_name'], data_engine['endpoint_tag'], os.environ['application'])
 +        data_engine['notebook_primary_image_name'] = (lambda x: os.environ['notebook_primary_image_name'] if x != 'None'
 +        else data_engine['expected_primary_image_name'])(str(os.environ.get('notebook_primary_image_name')))
 +        print('Searching pre-configured images')
 +        data_engine['primary_image_name'] = GCPMeta.get_image_by_name(data_engine['notebook_primary_image_name'])
 +        if data_engine['primary_image_name'] == '':
 +            data_engine['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
 +        else:
 +            print('Pre-configured primary image found. Using: {}'.format(data_engine['primary_image_name'].get('name')))
 +            data_engine['primary_image_name'] = 'global/images/{}'.format(
 +                data_engine['primary_image_name'].get('name'))
  
 -    try:
 -        if os.environ['gcp_vpc_name'] == '':
 -            raise KeyError
 +        data_engine['secondary_image_name'] = GCPMeta.get_image_by_name(data_engine['expected_secondary_image_name'])
 +        if data_engine['secondary_image_name'] == '':
 +            data_engine['secondary_image_name'] = 'None'
          else:
 -            data_engine['vpc_name'] = os.environ['gcp_vpc_name']
 -    except KeyError:
 -        data_engine['vpc_name'] = '{}-ssn-vpc'.format(data_engine['service_base_name'])
 -    try:
 -        data_engine['exploratory_name'] = (os.environ['exploratory_name']).lower().replace('_', '-')
 -    except:
 -        data_engine['exploratory_name'] = ''
 -    try:
 -        data_engine['computational_name'] = os.environ['computational_name'].lower().replace('_', '-')
 -    except:
 -        data_engine['computational_name'] = ''
 -
 -    data_engine['subnet_name'] = '{0}-{1}-subnet'.format(data_engine['service_base_name'],
 -                                                         data_engine['project_name'])
 -    data_engine['master_size'] = os.environ['gcp_dataengine_master_size']
 -    data_engine['slave_size'] = os.environ['gcp_dataengine_slave_size']
 -    data_engine['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
 -    data_engine['dataengine_service_account_name'] = '{}-{}-ps'.format(data_engine['service_base_name'],
 -                                                                       data_engine['project_name'])
 -
 -    if os.environ['conf_os_family'] == 'debian':
 -        initial_user = 'ubuntu'
 -        sudo_group = 'sudo'
 -    if os.environ['conf_os_family'] == 'redhat':
 -        initial_user = 'ec2-user'
 -        sudo_group = 'wheel'
 -    data_engine['cluster_name'] = data_engine['service_base_name'] + '-' + data_engine['project_name'] + \
 -                                  '-de-' + data_engine['exploratory_name'] + '-' + \
 -                                  data_engine['computational_name']
 -    data_engine['master_node_name'] = data_engine['cluster_name'] + '-m'
 -    data_engine['slave_node_name'] = data_engine['cluster_name'] + '-s'
 -    data_engine['instance_count'] = int(os.environ['dataengine_instance_count'])
 -    data_engine['notebook_name'] = os.environ['notebook_instance_name']
 -
 -    data_engine['primary_disk_size'] = '30'
 -    data_engine['secondary_disk_size'] = os.environ['notebook_disk_size']
 -
 -    data_engine['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
 -    if data_engine['shared_image_enabled'] == 'false':
 -        data_engine['expected_primary_image_name'] = '{}-{}-{}-{}-primary-image'.format(
 -            data_engine['service_base_name'], data_engine['endpoint_tag'], data_engine['project_name'],
 -            os.environ['application'])
 -        data_engine['expected_secondary_image_name'] = '{}-{}-{}-{}-secondary-image'.format(
 -            data_engine['service_base_name'], data_engine['endpoint_tag'], data_engine['project_name'],
 -            os.environ['application'])
 -    else:
 -        data_engine['expected_primary_image_name'] = '{}-{}-{}-primary-image'.format(
 -            data_engine['service_base_name'], data_engine['endpoint_tag'], os.environ['application'])
 -        data_engine['expected_secondary_image_name'] = '{}-{}-{}-secondary-image'.format(
 -            data_engine['service_base_name'], data_engine['endpoint_tag'], os.environ['application'])
 -    data_engine['notebook_primary_image_name'] = (lambda x: os.environ['notebook_primary_image_name'] if x != 'None'
 -    else data_engine['expected_primary_image_name'])(str(os.environ.get('notebook_primary_image_name')))
 -    print('Searching pre-configured images')
 -    data_engine['primary_image_name'] = GCPMeta().get_image_by_name(data_engine['notebook_primary_image_name'])
 -    if data_engine['primary_image_name'] == '':
 -        data_engine['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
 -    else:
 -        print('Pre-configured primary image found. Using: {}'.format(data_engine['primary_image_name'].get('name')))
 -        data_engine['primary_image_name'] = 'global/images/{}'.format(
 -            data_engine['primary_image_name'].get('name'))
 -
 -    data_engine['secondary_image_name'] = GCPMeta().get_image_by_name(data_engine['expected_secondary_image_name'])
 -    if data_engine['secondary_image_name'] == '':
 -        data_engine['secondary_image_name'] = 'None'
 -    else:
 -        print('Pre-configured secondary image found. Using: {}'.format(data_engine['secondary_image_name'].get('name')))
 -        data_engine['secondary_image_name'] = 'global/images/{}'.format(data_engine['secondary_image_name'].get('name'))
 -
 -    with open('/root/result.json', 'w') as f:
 -        data = {"hostname": data_engine['cluster_name'], "error": ""}
 -        json.dump(data, f)
 -
 -    data_engine['gpu_accelerator_type'] = 'None'
 -    if os.environ['application'] in ('tensor', 'tensor-rstudio', 'deeplearning'):
 -        data_engine['gpu_accelerator_type'] = os.environ['gcp_gpu_accelerator_type']
 -    data_engine['network_tag'] = '{0}-{1}-ps'.format(data_engine['service_base_name'],
 -                                                     data_engine['project_name'])
 -
 -    additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "" ).replace(
 -        "'}", "").lower()
 -
 -    data_engine['slave_labels'] = {"name": data_engine['cluster_name'],
 -                                   "sbn": data_engine['service_base_name'],
 -                                   "type": "slave",
 -                                   "notebook_name": data_engine['notebook_name'],
 -                                   "product": "dlab"}
 -    data_engine['master_labels'] = {"name": data_engine['cluster_name'],
 -                                    "sbn": data_engine['service_base_name'],
 -                                    "type": "master",
 -                                    "notebook_name": data_engine['notebook_name'],
 -                                    "product": "dlab"}
 -
 -    for tag in additional_tags.split(','):
 -        label_key = tag.split(':')[0]
 -        label_value = tag.split(':')[1].replace('_', '-')
 -        if '@' in label_value:
 -            label_value = label_value[:label_value.find('@')]
 -        if label_value != '':
 -            data_engine['slave_labels'].update({label_key: label_value})
 -            data_engine['master_labels'].update({label_key: label_value})
 +            print('Pre-configured secondary image found. Using: {}'.format(
 +                data_engine['secondary_image_name'].get('name')))
 +            data_engine['secondary_image_name'] = 'global/images/{}'.format(
 +                data_engine['secondary_image_name'].get('name'))
 +
 +        with open('/root/result.json', 'w') as f:
 +            data = {"hostname": data_engine['cluster_name'], "error": ""}
 +            json.dump(data, f)
 +
 +        data_engine['gpu_accelerator_type'] = 'None'
 +        if os.environ['application'] in ('tensor', 'tensor-rstudio', 'deeplearning'):
 +            data_engine['gpu_accelerator_type'] = os.environ['gcp_gpu_accelerator_type']
 +        data_engine['network_tag'] = '{0}-{1}-{2}-ps'.format(data_engine['service_base_name'],
 +                                                             data_engine['project_name'], data_engine['endpoint_name'])
 +        additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "").replace(
-             "'}", "")
++            "'}", "").lower()
 +
 +        data_engine['slave_labels'] = {"name": data_engine['cluster_name'],
 +                                       "sbn": data_engine['service_base_name'],
 +                                       "type": "slave",
 +                                       "notebook_name": data_engine['notebook_name'],
 +                                       "product": "dlab"}
 +        data_engine['master_labels'] = {"name": data_engine['cluster_name'],
 +                                        "sbn": data_engine['service_base_name'],
 +                                        "type": "master",
 +                                        "notebook_name": data_engine['notebook_name'],
 +                                        "product": "dlab"}
 +
 +        for tag in additional_tags.split(','):
 +            label_key = tag.split(':')[0]
-             label_value = tag.split(':')[1]
-             if label_key == 'user_tag':
-                 if '@' in label_value:
-                     label_value = label_value[:label_value.find('@')]
++            label_value = tag.split(':')[1].replace('_', '-')
++            if '@' in label_value:
++                label_value = label_value[:label_value.find('@')]
 +            if label_value != '':
 +                data_engine['slave_labels'].update({label_key: label_value})
 +                data_engine['master_labels'].update({label_key: label_value})
 +    except Exception as err:
 +        dlab.fab.append_result("Failed to generate variables dictionary. Exception:" + str(err))
 +        sys.exit(1)
  
      try:
          logging.info('[CREATE MASTER NODE]')
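
The thread running through all three Python scripts is the DLAB-1546
naming-convention change: the endpoint name becomes part of every derived
resource name (subnet, service account, cluster, network tag). A toy
illustration of the new scheme, with made-up input values:

    service_base_name = 'dlab'
    project_name = 'my_project'.replace('_', '-').lower()     # 'my-project'
    endpoint_name = 'gcp_endpoint'.replace('_', '-').lower()  # 'gcp-endpoint'

    subnet_name = '{0}-{1}-{2}-subnet'.format(service_base_name, project_name, endpoint_name)
    service_account_name = '{}-{}-{}-ps-sa'.format(service_base_name, project_name, endpoint_name)
    network_tag = '{0}-{1}-{2}-ps'.format(service_base_name, project_name, endpoint_name)
    # subnet_name          == 'dlab-my-project-gcp-endpoint-subnet'
    # service_account_name == 'dlab-my-project-gcp-endpoint-ps-sa'
    # network_tag          == 'dlab-my-project-gcp-endpoint-ps'
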
diff --cc services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ExploratoryDAO.java
index 93ed1d3,e634487..3a18dab
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ExploratoryDAO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ExploratoryDAO.java
@@@ -155,29 -136,7 +136,11 @@@ public class ExploratoryDAO extends Bas
  		return getUserInstances(and(eq(PROJECT, project)), false);
  	}
  
 +	public List<UserInstanceDTO> fetchExploratoryFieldsForProjectWithComp(String project) {
 +		return getUserInstances(and(eq(PROJECT, project)), true);
 +	}
 +
  	/**
- 	 * Finds and returns the info of all user's notebooks whose status is present among predefined ones.
- 	 *
- 	 * @param user                        user name.
- 	 * @param computationalFieldsRequired true/false.
- 	 * @param statuses                    array of statuses.
- 	 */
- 	public List<UserInstanceDTO> fetchUserExploratoriesWhereStatusIn(String user, boolean computationalFieldsRequired,
- 																	 UserInstanceStatus... statuses) {
- 		final List<String> statusList = statusList(statuses);
- 		return getUserInstances(
- 				and(
- 						eq(USER, user),
- 						in(STATUS, statusList)
- 				),
- 				computationalFieldsRequired);
- 	}
- 
- 	/**
  	 * Finds and returns the info of all user's notebooks whose status or status of affiliated computational resource
  	 * is present among predefined ones.
  	 *
diff --cc services/self-service/src/main/java/com/epam/dlab/backendapi/service/ComputationalService.java
index 4c489d0,f686b59..4a6f392
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ComputationalService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ComputationalService.java
@@@ -21,11 -21,8 +21,9 @@@ package com.epam.dlab.backendapi.servic
  
  import com.epam.dlab.auth.UserInfo;
  import com.epam.dlab.backendapi.resources.dto.ComputationalCreateFormDTO;
 +import com.epam.dlab.backendapi.resources.dto.ComputationalTemplatesDTO;
  import com.epam.dlab.backendapi.resources.dto.SparkStandaloneClusterCreateForm;
- import com.epam.dlab.dto.UserInstanceStatus;
  import com.epam.dlab.dto.aws.computational.ClusterConfig;
- import com.epam.dlab.dto.base.DataEngineType;
  import com.epam.dlab.dto.computational.UserComputationalResource;
  
  import java.util.List;


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@dlab.apache.org
For additional commands, e-mail: commits-help@dlab.apache.org