Posted to commits@dlab.apache.org by om...@apache.org on 2020/03/05 15:14:04 UTC
[incubator-dlab] branch DLAB-1546 updated (6ebf686 -> 23ec629)
This is an automated email from the ASF dual-hosted git repository.
omartushevskyi pushed a change to branch DLAB-1546
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git.
from 6ebf686 [DLAB-1546]: added name convention changes
add 14f0044 [DLAB-1599]: Adjusted billing acording to multicloud support (#632)
add 79ca168 [DLAB-1510] All resources are stopped with project (#611)
add 532573c [DLAB-1606]: Fixed small css bugs (#634)
add 30045cd [DLAB-1609]: Conveyed 'Resource Type' value to UI (#635)
add f778f0c [DlAB-1520] Filter the projects with terminated nodes (#633)
add a7c9a81 DLAB-1539 added check if custom tag is defined
add 1a0355b [DLAB-1539]: changed if statement for custom tag
add 96f901f [DLAB-1539]: changed how additional labels are assigned
add 1e1d4fa Merge pull request #636 from apache/DLAB-1539
new 23ec629 Merge branch 'develop' of github.com:apache/incubator-dlab into DLAB-1546
The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email. The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
Summary of changes:
...common_notebook_configure_dataengine-service.py | 21 ++++++++-------
.../general/scripts/gcp/common_prepare_notebook.py | 24 ++++++++---------
.../scripts/gcp/dataengine-service_prepare.py | 25 ++++++++----------
.../src/general/scripts/gcp/dataengine_prepare.py | 30 +++++++++-------------
.../dlab/backendapi/resources/ProjectResource.java | 23 +----------------
.../service/impl/ProjectServiceImpl.java | 1 +
.../management-grid/management-grid.component.html | 4 ++-
.../management-grid/management-grid.component.scss | 26 +++++++++++--------
.../administration/project/project-data.service.ts | 11 +++++---
.../project-list/project-list.component.html | 4 ++-
.../project-list/project-list.component.scss | 8 +++++-
.../reporting-grid/reporting-grid.component.scss | 29 +++++++++++++--------
.../src/app/reporting/reporting.component.ts | 8 +++---
.../resources-grid/resources-grid.component.html | 4 ++-
.../resources-grid/resources-grid.component.scss | 12 +++++++++
.../resources/webapp/src/assets/styles/_theme.scss | 2 --
.../webapp/src/dictionary/azure.dictionary.ts | 2 +-
.../backendapi/resources/ProjectResourceTest.java | 30 ++++++++++++++++++----
18 files changed, 145 insertions(+), 119 deletions(-)
[incubator-dlab] 01/01: Merge branch 'develop' of github.com:apache/incubator-dlab into DLAB-1546
Posted by om...@apache.org.
This is an automated email from the ASF dual-hosted git repository.
omartushevskyi pushed a commit to branch DLAB-1546
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git
commit 23ec6299c1ec8ae0cc8cc0938565e74cdb9a7992
Merge: 6ebf686 1e1d4fa
Author: Oleh Martushevskyi <Ol...@epam.com>
AuthorDate: Thu Mar 5 17:13:45 2020 +0200
Merge branch 'develop' of github.com:apache/incubator-dlab into DLAB-1546
Conflicts:
infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
...common_notebook_configure_dataengine-service.py | 21 ++++++++-------
.../general/scripts/gcp/common_prepare_notebook.py | 24 ++++++++---------
.../scripts/gcp/dataengine-service_prepare.py | 25 ++++++++----------
.../src/general/scripts/gcp/dataengine_prepare.py | 30 +++++++++-------------
.../dlab/backendapi/resources/ProjectResource.java | 23 +----------------
.../service/impl/ProjectServiceImpl.java | 1 +
.../management-grid/management-grid.component.html | 4 ++-
.../management-grid/management-grid.component.scss | 26 +++++++++++--------
.../administration/project/project-data.service.ts | 11 +++++---
.../project-list/project-list.component.html | 4 ++-
.../project-list/project-list.component.scss | 8 +++++-
.../reporting-grid/reporting-grid.component.scss | 29 +++++++++++++--------
.../src/app/reporting/reporting.component.ts | 8 +++---
.../resources-grid/resources-grid.component.html | 4 ++-
.../resources-grid/resources-grid.component.scss | 12 +++++++++
.../resources/webapp/src/assets/styles/_theme.scss | 2 --
.../webapp/src/dictionary/azure.dictionary.ts | 2 +-
.../backendapi/resources/ProjectResourceTest.java | 30 ++++++++++++++++++----
18 files changed, 145 insertions(+), 119 deletions(-)
diff --cc infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
index 5738c2c,0f5ab39..3fb10d3
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
@@@ -86,15 -70,19 +81,19 @@@ if __name__ == "__main__"
os.environ['notebook_instance_name']: "configured",
"name": notebook_config['cluster_name'],
"sbn": notebook_config['service_base_name'],
- "user": notebook_config['user_tag'],
"notebook_name": os.environ['notebook_instance_name'],
- "project_tag": notebook_config['project_tag'],
- "endpoint_tag": notebook_config['endpoint_tag'],
"product": "dlab",
- "computational_name": (os.environ['computational_name']).lower().replace('_', '-')
+ "computational_name": (os.environ['computational_name'].replace('_', '-').lower())
}
- if notebook_config['custom_tag'] != '':
- notebook_config['cluster_labels'].update({'custom_tag': notebook_config['custom_tag']})
+
+ for tag in additional_tags.split(','):
+ label_key = tag.split(':')[0]
+ label_value = tag.split(':')[1]
+ if label_key == 'user_tag':
+ if '@' in label_value:
+ label_value = label_value[:label_value.find('@')]
+ if label_value != '':
+ notebook_config['cluster_labels'].update({label_key: label_value})
try:
logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
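
The change above recurs across all four GCP scripts in this merge: the dedicated user_tag/custom_tag handling is replaced by a generic loop that splits the tags environment string into key:value pairs, trims the e-mail domain from user_tag, and adds every non-empty value as a label. A minimal standalone sketch of that parsing follows; the sample tag string is hypothetical, in the scripts it comes from os.environ['tags'] after the "u'" artifacts are stripped.

    # Sketch of the label-parsing loop introduced by this merge.
    # The sample string below is made up for illustration only.
    additional_tags = "user_tag:jdoe@example.com,custom_tag:demo,project_tag:prj1,endpoint_tag:ep1"

    cluster_labels = {"product": "dlab"}

    for tag in additional_tags.split(','):
        label_key = tag.split(':')[0]
        label_value = tag.split(':')[1]
        if label_key == 'user_tag' and '@' in label_value:
            # keep only the local part of the user e-mail
            label_value = label_value[:label_value.find('@')]
        if label_value != '':
            cluster_labels.update({label_key: label_value})

    print(cluster_labels)
    # {'product': 'dlab', 'user_tag': 'jdoe', 'custom_tag': 'demo',
    #  'project_tag': 'prj1', 'endpoint_tag': 'ep1'}
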
diff --cc infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
index 85d7cc8,96ea2da..9624156
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
@@@ -40,140 -37,118 +40,136 @@@ if __name__ == "__main__"
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG,
filename=local_log_filepath)
-
- print('Generating infrastructure names and tags')
- notebook_config = dict()
- notebook_config['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
- notebook_config['edge_user_name'] = (os.environ['edge_user_name']).lower().replace('_', '-')
- notebook_config['project_name'] = (os.environ['project_name']).lower().replace('_', '-')
- notebook_config['project_tag'] = (os.environ['project_name']).lower().replace('_', '-')
- notebook_config['endpoint_tag'] = (os.environ['endpoint_name']).lower().replace('_', '-')
- notebook_config['region'] = os.environ['gcp_region']
- notebook_config['zone'] = os.environ['gcp_zone']
-
- edge_status = GCPMeta().get_instance_status('{0}-{1}-{2}-edge'.format(notebook_config['service_base_name'],
- notebook_config['project_name'],
- notebook_config['endpoint_tag']))
- if edge_status != 'RUNNING':
- logging.info('ERROR: Edge node is unavailable! Aborting...')
- print('ERROR: Edge node is unavailable! Aborting...')
- ssn_hostname = GCPMeta().get_private_ip_address(notebook_config['service_base_name'] + '-ssn')
- put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'],
- ssn_hostname)
- append_result("Edge node is unavailable")
- sys.exit(1)
-
try:
- if os.environ['gcp_vpc_name'] == '':
- raise KeyError
- else:
- notebook_config['vpc_name'] = os.environ['gcp_vpc_name']
- except KeyError:
- notebook_config['vpc_name'] = '{}-ssn-vpc'.format(notebook_config['service_base_name'])
- try:
- notebook_config['exploratory_name'] = (os.environ['exploratory_name']).lower().replace('_', '-')
- except:
- notebook_config['exploratory_name'] = ''
- notebook_config['subnet_name'] = '{0}-{1}-subnet'.format(notebook_config['service_base_name'],
- notebook_config['project_name'])
- notebook_config['instance_size'] = os.environ['gcp_notebook_instance_size']
- notebook_config['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
- notebook_config['notebook_service_account_name'] = '{}-{}-ps'.format(notebook_config['service_base_name'],
- notebook_config['project_name']).replace('_', '-')
-
- if os.environ['conf_os_family'] == 'debian':
- initial_user = 'ubuntu'
- sudo_group = 'sudo'
- if os.environ['conf_os_family'] == 'redhat':
- initial_user = 'ec2-user'
- sudo_group = 'wheel'
- notebook_config['instance_name'] = '{0}-{1}-{2}-nb-{3}'.format(notebook_config['service_base_name'],
- notebook_config['project_name'], os.environ['endpoint_name'],
- notebook_config['exploratory_name'])
- notebook_config['primary_disk_size'] = (lambda x: '30' if x == 'deeplearning' else '12')(os.environ['application'])
- notebook_config['secondary_disk_size'] = os.environ['notebook_disk_size']
-
- notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
- if notebook_config['shared_image_enabled'] == 'false':
- notebook_config['expected_primary_image_name'] = '{}-{}-{}-{}-primary-image'.format(
- notebook_config['service_base_name'], notebook_config['endpoint_tag'], notebook_config['project_name'],
- os.environ['application'])
- notebook_config['expected_secondary_image_name'] = '{}-{}-{}-{}-secondary-image'.format(
- notebook_config['service_base_name'], notebook_config['endpoint_tag'], notebook_config['project_name'],
- os.environ['application'])
- else:
- notebook_config['expected_primary_image_name'] = '{}-{}-{}-primary-image'.format(
- notebook_config['service_base_name'], notebook_config['endpoint_tag'], os.environ['application'])
- notebook_config['expected_secondary_image_name'] = '{}-{}-{}-secondary-image'.format(
- notebook_config['service_base_name'], notebook_config['endpoint_tag'], os.environ['application'])
- notebook_config['notebook_primary_image_name'] = (lambda x: os.environ['notebook_primary_image_name'] if x != 'None'
- else notebook_config['expected_primary_image_name'])(str(os.environ.get('notebook_primary_image_name')))
- print('Searching pre-configured images')
- notebook_config['primary_image_name'] = GCPMeta().get_image_by_name(notebook_config['expected_primary_image_name'])
- if notebook_config['primary_image_name'] == '':
- notebook_config['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
- else:
- print('Pre-configured primary image found. Using: {}'.format(notebook_config['primary_image_name'].get('name')))
- notebook_config['primary_image_name'] = 'global/images/{}'.format(notebook_config['primary_image_name'].get('name'))
-
- notebook_config['secondary_image_name'] = GCPMeta().get_image_by_name(notebook_config['expected_secondary_image_name'])
- if notebook_config['secondary_image_name'] == '':
- notebook_config['secondary_image_name'] = 'None'
- else:
- print('Pre-configured secondary image found. Using: {}'.format(notebook_config['secondary_image_name'].get('name')))
- notebook_config['secondary_image_name'] = 'global/images/{}'.format(notebook_config['secondary_image_name'].get('name'))
-
- notebook_config['gpu_accelerator_type'] = 'None'
-
- if os.environ['application'] in ('tensor', 'tensor-rstudio', 'deeplearning'):
- notebook_config['gpu_accelerator_type'] = os.environ['gcp_gpu_accelerator_type']
-
- notebook_config['network_tag'] = '{0}-{1}-ps'.format(notebook_config['service_base_name'],
- notebook_config['project_name'])
-
- with open('/root/result.json', 'w') as f:
- data = {"notebook_name": notebook_config['instance_name'], "error": ""}
- json.dump(data, f)
-
- additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "" ).replace("'}", "")
-
- print('Additional tags will be added: {}'.format(additional_tags))
- notebook_config['labels'] = {"name": notebook_config['instance_name'],
- "sbn": notebook_config['service_base_name'],
- "product": "dlab"
- }
-
- for tag in additional_tags.split(','):
- label_key = tag.split(':')[0]
- label_value = tag.split(':')[1]
- if label_key == 'user_tag':
- if '@' in label_value:
- label_value = label_value[:label_value.find('@')]
- if label_value != '':
- notebook_config['labels'].update({label_key: label_value})
+ GCPMeta = dlab.meta_lib.GCPMeta()
+ GCPActions = dlab.actions_lib.GCPActions()
+ print('Generating infrastructure names and tags')
+ notebook_config = dict()
+ notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
+ notebook_config['edge_user_name'] = (os.environ['edge_user_name'])
+ notebook_config['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
+ notebook_config['project_tag'] = notebook_config['project_name']
+ notebook_config['endpoint_name'] = os.environ['endpoint_name'].replace('_', '-').lower()
+ notebook_config['endpoint_tag'] = notebook_config['endpoint_name']
+ notebook_config['region'] = os.environ['gcp_region']
+ notebook_config['zone'] = os.environ['gcp_zone']
+
+ edge_status = GCPMeta.get_instance_status('{0}-{1}-{2}-edge'.format(notebook_config['service_base_name'],
+ notebook_config['project_name'],
+ notebook_config['endpoint_tag']))
+ if edge_status != 'RUNNING':
+ logging.info('ERROR: Edge node is unavailable! Aborting...')
+ print('ERROR: Edge node is unavailable! Aborting...')
+ ssn_hostname = GCPMeta.get_private_ip_address(notebook_config['service_base_name'] + '-ssn')
+ dlab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'],
+ ssn_hostname)
+ dlab.fab.append_result("Edge node is unavailable")
+ sys.exit(1)
+ try:
+ if os.environ['gcp_vpc_name'] == '':
+ raise KeyError
+ else:
+ notebook_config['vpc_name'] = os.environ['gcp_vpc_name']
+ except KeyError:
+ notebook_config['vpc_name'] = '{}-vpc'.format(notebook_config['service_base_name'])
+ try:
+ notebook_config['exploratory_name'] = (os.environ['exploratory_name']).replace('_', '-').lower()
+ except:
+ notebook_config['exploratory_name'] = ''
+ notebook_config['subnet_name'] = '{0}-{1}-{2}-subnet'.format(notebook_config['service_base_name'],
+ notebook_config['project_name'],
+ notebook_config['endpoint_tag'])
+ notebook_config['instance_size'] = os.environ['gcp_notebook_instance_size']
+ notebook_config['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
+ notebook_config['notebook_service_account_name'] = '{}-{}-{}-ps-sa'.format(notebook_config['service_base_name'],
+ notebook_config['project_name'],
+ notebook_config['endpoint_name'])
+
+ if os.environ['conf_os_family'] == 'debian':
+ notebook_config['initial_user'] = 'ubuntu'
+ notebook_config['sudo_group'] = 'sudo'
+ if os.environ['conf_os_family'] == 'redhat':
+ notebook_config['initial_user'] = 'ec2-user'
+ notebook_config['sudo_group'] = 'wheel'
+ notebook_config['instance_name'] = '{0}-{1}-{2}-nb-{3}'.format(notebook_config['service_base_name'],
+ notebook_config['project_name'],
+ notebook_config['endpoint_name'],
+ notebook_config['exploratory_name'])
+ notebook_config['primary_disk_size'] = (lambda x: '30' if x == 'deeplearning' else '12')(
+ os.environ['application'])
+ notebook_config['secondary_disk_size'] = os.environ['notebook_disk_size']
+
+ notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+ if notebook_config['shared_image_enabled'] == 'false':
+ notebook_config['expected_primary_image_name'] = '{}-{}-{}-{}-primary-image'.format(
+ notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_tag'],
+ os.environ['application'])
+ notebook_config['expected_secondary_image_name'] = '{}-{}-{}-{}-secondary-image'.format(
+ notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_tag'],
+ os.environ['application'])
+ else:
+ notebook_config['expected_primary_image_name'] = '{}-{}-{}-primary-image'.format(
+ notebook_config['service_base_name'], notebook_config['endpoint_name'], os.environ['application'])
+ notebook_config['expected_secondary_image_name'] = '{}-{}-{}-secondary-image'.format(
+ notebook_config['service_base_name'], notebook_config['endpoint_name'], os.environ['application'])
+ notebook_config['notebook_primary_image_name'] = \
+ (lambda x: os.environ['notebook_primary_image_name'] if x != 'None'
+ else notebook_config['expected_primary_image_name'])(str(os.environ.get('notebook_primary_image_name')))
+ print('Searching pre-configured images')
+ notebook_config['primary_image_name'] = GCPMeta.get_image_by_name(
+ notebook_config['expected_primary_image_name'])
+ if notebook_config['primary_image_name'] == '':
+ notebook_config['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
+ else:
+ print('Pre-configured primary image found. Using: {}'.format(
+ notebook_config['primary_image_name'].get('name')))
+ notebook_config['primary_image_name'] = 'global/images/{}'.format(
+ notebook_config['primary_image_name'].get('name'))
+
+ notebook_config['secondary_image_name'] = GCPMeta.get_image_by_name(
+ notebook_config['expected_secondary_image_name'])
+ if notebook_config['secondary_image_name'] == '':
+ notebook_config['secondary_image_name'] = 'None'
+ else:
+ print('Pre-configured secondary image found. Using: {}'.format(
+ notebook_config['secondary_image_name'].get('name')))
+ notebook_config['secondary_image_name'] = 'global/images/{}'.format(
+ notebook_config['secondary_image_name'].get('name'))
+
+ notebook_config['gpu_accelerator_type'] = 'None'
+
+ if os.environ['application'] in ('tensor', 'tensor-rstudio', 'deeplearning'):
+ notebook_config['gpu_accelerator_type'] = os.environ['gcp_gpu_accelerator_type']
+
+ notebook_config['network_tag'] = '{0}-{1}-{2}-ps'.format(notebook_config['service_base_name'],
+ notebook_config['project_name'],
+ notebook_config['endpoint_name'])
+
+ with open('/root/result.json', 'w') as f:
+ data = {"notebook_name": notebook_config['instance_name'], "error": ""}
+ json.dump(data, f)
+
- additional_tags = json.loads(
- os.environ['tags'].replace("': u'", "\": \"").replace("', u'", "\", \"").replace(
- "{u'", "{\"").replace("'}", "\"}"))
++ additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "").replace(
++ "'}", "")
+
- if '@' in additional_tags['user_tag']:
- notebook_config['user_tag'] = additional_tags['user_tag'][:additional_tags['user_tag'].find('@')]
- else:
- notebook_config['user_tag'] = additional_tags['user_tag']
-
- notebook_config['custom_tag'] = additional_tags['custom_tag']
+ print('Additional tags will be added: {}'.format(additional_tags))
+ notebook_config['labels'] = {"name": notebook_config['instance_name'],
+ "sbn": notebook_config['service_base_name'],
- "project_tag": notebook_config['project_tag'],
- "endpoint_tag": notebook_config['endpoint_tag'],
- "user": notebook_config['user_tag'],
+ "product": "dlab"
+ }
+
- if notebook_config['custom_tag'] != '':
- notebook_config['labels'].update({'custom_tag': notebook_config['custom_tag']})
++ for tag in additional_tags.split(','):
++ label_key = tag.split(':')[0]
++ label_value = tag.split(':')[1]
++ if label_key == 'user_tag':
++ if '@' in label_value:
++ label_value = label_value[:label_value.find('@')]
++ if label_value != '':
++ notebook_config['labels'].update({label_key: label_value})
+ except Exception as err:
+ dlab.fab.append_result("Failed to generate variables dictionary.", str(err))
+ sys.exit(1)
# launching instance for notebook server
try:
logging.info('[CREATE NOTEBOOK INSTANCE]')
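
Beyond the label handling, the naming-convention part of this branch threads the endpoint name into resource names (subnet, service account, notebook instance, network tag) that previously combined only the service base name and the project name. A hedged sketch of the new composition, with made-up input values; in the scripts these come from os.environ (conf_service_base_name, project_name, endpoint_name, exploratory_name).

    service_base_name = 'dlab-demo'
    project_name = 'prj1'
    endpoint_name = 'ep1'
    exploratory_name = 'nb1'

    subnet_name = '{0}-{1}-{2}-subnet'.format(service_base_name, project_name, endpoint_name)
    service_account_name = '{}-{}-{}-ps-sa'.format(service_base_name, project_name, endpoint_name)
    instance_name = '{0}-{1}-{2}-nb-{3}'.format(service_base_name, project_name, endpoint_name,
                                                exploratory_name)

    print(subnet_name)            # dlab-demo-prj1-ep1-subnet
    print(service_account_name)   # dlab-demo-prj1-ep1-ps-sa
    print(instance_name)          # dlab-demo-prj1-ep1-nb-nb1
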
diff --cc infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
index cf0c39d,4fc3028..4b2d8c1
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
@@@ -43,84 -42,73 +43,81 @@@ if __name__ == "__main__"
level=logging.INFO,
filename=local_log_filepath)
try:
- os.environ['exploratory_name']
- except:
- os.environ['exploratory_name'] = ''
- if os.path.exists('/response/.dataproc_creating_{}'.format(os.environ['exploratory_name'])):
- time.sleep(30)
-
- print('Generating infrastructure names and tags')
- dataproc_conf = dict()
- try:
- dataproc_conf['exploratory_name'] = (os.environ['exploratory_name']).lower().replace('_', '-')
- except:
- dataproc_conf['exploratory_name'] = ''
- try:
- dataproc_conf['computational_name'] = (os.environ['computational_name']).lower().replace('_', '-')
- except:
- dataproc_conf['computational_name'] = ''
- dataproc_conf['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
- dataproc_conf['edge_user_name'] = (os.environ['edge_user_name']).lower().replace('_', '-')
- dataproc_conf['project_name'] = (os.environ['project_name']).lower().replace('_', '-')
- dataproc_conf['project_tag'] = (os.environ['project_name']).lower().replace('_', '-')
- dataproc_conf['endpoint_tag'] = (os.environ['endpoint_name']).lower().replace('_', '-')
- dataproc_conf['endpoint_name'] = (os.environ['endpoint_name']).lower().replace('_', '-')
- dataproc_conf['key_name'] = os.environ['conf_key_name']
- dataproc_conf['key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
- dataproc_conf['region'] = os.environ['gcp_region']
- dataproc_conf['zone'] = os.environ['gcp_zone']
- dataproc_conf['subnet'] = '{0}-{1}-subnet'.format(dataproc_conf['service_base_name'], dataproc_conf['project_name'])
- dataproc_conf['cluster_name'] = '{0}-{1}-des-{2}-{3}'.format(dataproc_conf['service_base_name'], dataproc_conf['project_name'],
- dataproc_conf['exploratory_name'], dataproc_conf['computational_name'])
- dataproc_conf['cluster_tag'] = '{0}-{1}-ps'.format(dataproc_conf['service_base_name'], dataproc_conf['project_name'])
- dataproc_conf['bucket_name'] = '{0}-{1}-{2}-bucket'.format(dataproc_conf['service_base_name'],
+ GCPMeta = dlab.meta_lib.GCPMeta()
+ GCPActions = dlab.actions_lib.GCPActions()
+ print('Generating infrastructure names and tags')
+ dataproc_conf = dict()
+ if 'exploratory_name' in os.environ:
+ dataproc_conf['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
+ else:
+ dataproc_conf['exploratory_name'] = ''
+ if 'computational_name' in os.environ:
+ dataproc_conf['computational_name'] = os.environ['computational_name'].replace('_', '-').lower()
+ else:
+ dataproc_conf['computational_name'] = ''
+ if os.path.exists('/response/.dataproc_creating_{}'.format(dataproc_conf['exploratory_name'])):
+ time.sleep(30)
+ dataproc_conf['service_base_name'] = (os.environ['conf_service_base_name'])
+ dataproc_conf['edge_user_name'] = (os.environ['edge_user_name'])
+ dataproc_conf['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
+ dataproc_conf['project_tag'] = dataproc_conf['project_name']
+ dataproc_conf['endpoint_name'] = (os.environ['endpoint_name']).replace('_', '-').lower()
+ dataproc_conf['endpoint_tag'] = dataproc_conf['endpoint_name']
+ dataproc_conf['key_name'] = os.environ['conf_key_name']
+ dataproc_conf['key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
+ dataproc_conf['region'] = os.environ['gcp_region']
+ dataproc_conf['zone'] = os.environ['gcp_zone']
+ dataproc_conf['subnet'] = '{0}-{1}-{2}-subnet'.format(dataproc_conf['service_base_name'],
+ dataproc_conf['project_name'],
+ dataproc_conf['endpoint_name'])
+ dataproc_conf['cluster_name'] = '{0}-{1}-{2}-des-{3}'.format(dataproc_conf['service_base_name'],
+ dataproc_conf['project_name'],
+ dataproc_conf['endpoint_name'],
+ dataproc_conf['computational_name'])
+ dataproc_conf['cluster_tag'] = '{0}-{1}-{2}-ps'.format(dataproc_conf['service_base_name'],
dataproc_conf['project_name'],
dataproc_conf['endpoint_name'])
- dataproc_conf['release_label'] = os.environ['dataproc_version']
-
- additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "" ).replace("'}", "")
-
- dataproc_conf['cluster_labels'] = {
- os.environ['notebook_instance_name']: "not-configured",
- "name": dataproc_conf['cluster_name'],
- "sbn": dataproc_conf['service_base_name'],
- "notebook_name": os.environ['notebook_instance_name'],
- "product": "dlab",
- "computational_name": dataproc_conf['computational_name']
- }
-
- for tag in additional_tags.split(','):
- label_key = tag.split(':')[0]
- label_value = tag.split(':')[1]
- if label_key == 'user_tag':
- if '@' in label_value:
- label_value = label_value[:label_value.find('@')]
- if label_value != '':
- dataproc_conf['cluster_labels'].update({label_key: label_value})
-
- dataproc_conf['dataproc_service_account_name'] = '{0}-{1}-ps'.format(dataproc_conf['service_base_name'],
- dataproc_conf['project_name'])
- dataproc_conf['dataproc_unique_index'] = GCPMeta().get_index_by_service_account_name(dataproc_conf['dataproc_service_account_name'])
- service_account_email = "{}-{}@{}.iam.gserviceaccount.com".format(dataproc_conf['service_base_name'],
- dataproc_conf['dataproc_unique_index'],
- os.environ['gcp_project_id'])
- dataproc_conf['edge_instance_hostname'] = '{0}-{1}-{2}-edge'.format(dataproc_conf['service_base_name'],
- dataproc_conf['project_name'],
- dataproc_conf['endpoint_name'])
- dataproc_conf['dlab_ssh_user'] = os.environ['conf_os_user']
+ dataproc_conf['bucket_name'] = '{0}-{1}-{2}-bucket'.format(dataproc_conf['service_base_name'],
+ dataproc_conf['project_name'],
+ dataproc_conf['endpoint_name'])
+ dataproc_conf['release_label'] = os.environ['dataproc_version']
- additional_tags = json.loads(
- os.environ['tags'].replace("': u'", "\": \"").replace("', u'", "\", \"").replace(
- "{u'", "{\"").replace("'}", "\"}"))
++ additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "").replace(
++ "'}", "")
+
- if '@' in additional_tags['user_tag']:
- dataproc_conf['user_tag'] = additional_tags['user_tag'][:additional_tags['user_tag'].find('@')]
- else:
- dataproc_conf['user_tag'] = additional_tags['user_tag']
-
- dataproc_conf['custom_tag'] = additional_tags['custom_tag']
+ dataproc_conf['cluster_labels'] = {
+ os.environ['notebook_instance_name']: "not-configured",
+ "name": dataproc_conf['cluster_name'],
+ "sbn": dataproc_conf['service_base_name'],
- "user": dataproc_conf['edge_user_name'],
- "project_tag": dataproc_conf['project_tag'],
- "endpoint_tag": dataproc_conf['endpoint_tag'],
+ "notebook_name": os.environ['notebook_instance_name'],
+ "product": "dlab",
+ "computational_name": dataproc_conf['computational_name']
+ }
- if dataproc_conf['custom_tag'] != '':
- dataproc_conf['cluster_labels'].update({'custom_tag': dataproc_conf['custom_tag']})
++
++ for tag in additional_tags.split(','):
++ label_key = tag.split(':')[0]
++ label_value = tag.split(':')[1]
++ if label_key == 'user_tag':
++ if '@' in label_value:
++ label_value = label_value[:label_value.find('@')]
++ if label_value != '':
++ dataproc_conf['cluster_labels'].update({label_key: label_value})
+ dataproc_conf['dataproc_service_account_name'] = '{0}-{1}-{2}-ps-sa'.format(dataproc_conf['service_base_name'],
+ dataproc_conf['project_name'],
+ dataproc_conf['endpoint_name'])
+ dataproc_conf['dataproc_unique_index'] = GCPMeta.get_index_by_service_account_name(
+ dataproc_conf['dataproc_service_account_name'])
+ service_account_email = "{}-{}@{}.iam.gserviceaccount.com".format(dataproc_conf['service_base_name'],
+ dataproc_conf['dataproc_unique_index'],
+ os.environ['gcp_project_id'])
+ dataproc_conf['edge_instance_hostname'] = '{0}-{1}-{2}-edge'.format(dataproc_conf['service_base_name'],
+ dataproc_conf['project_name'],
+ dataproc_conf['endpoint_name'])
+ dataproc_conf['dlab_ssh_user'] = os.environ['conf_os_user']
+ except Exception as err:
+ dlab.fab.append_result("Failed to generate variables dictionary. Exception:" + str(err))
+ sys.exit(1)
- edge_status = GCPMeta().get_instance_status(dataproc_conf['edge_instance_hostname'])
+ edge_status = GCPMeta.get_instance_status(dataproc_conf['edge_instance_hostname'])
if edge_status != 'RUNNING':
logging.info('ERROR: Edge node is unavailable! Aborting...')
print('ERROR: Edge node is unavailable! Aborting...')
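
This script (and dataengine_prepare.py below) also replaces the bare try/except around optional environment variables with an explicit membership check, so a genuinely unexpected error is no longer silently swallowed. A small sketch of the old and new patterns, assuming only the standard library:

    import os

    # Old pattern (any exception fell through to the default):
    # try:
    #     computational_name = os.environ['computational_name'].replace('_', '-').lower()
    # except:
    #     computational_name = ''

    # New pattern in this merge: test for presence explicitly.
    if 'computational_name' in os.environ:
        computational_name = os.environ['computational_name'].replace('_', '-').lower()
    else:
        computational_name = ''
    print(computational_name or '(not set)')
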
diff --cc infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
index ef9a000,5088de1..447fb26
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
@@@ -40,150 -38,134 +40,144 @@@ if __name__ == "__main__"
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG,
filename=local_log_filepath)
+ try:
+ GCPMeta = dlab.meta_lib.GCPMeta()
+ GCPActions = dlab.actions_lib.GCPActions()
+ print('Generating infrastructure names and tags')
+ data_engine = dict()
+ data_engine['service_base_name'] = (os.environ['conf_service_base_name'])
+ data_engine['edge_user_name'] = (os.environ['edge_user_name'])
+ data_engine['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
+ data_engine['project_tag'] = data_engine['project_name']
+ data_engine['endpoint_name'] = os.environ['endpoint_name'].replace('_', '-').lower()
+ data_engine['endpoint_tag'] = data_engine['endpoint_name']
+ data_engine['region'] = os.environ['gcp_region']
+ data_engine['zone'] = os.environ['gcp_zone']
+
+ edge_status = GCPMeta.get_instance_status('{0}-{1}-{2}-edge'.format(data_engine['service_base_name'],
+ data_engine['project_name'],
+ data_engine['endpoint_name']))
+ if edge_status != 'RUNNING':
+ logging.info('ERROR: Edge node is unavailable! Aborting...')
+ print('ERROR: Edge node is unavailable! Aborting...')
+ ssn_hostname = GCPMeta.get_private_ip_address(data_engine['service_base_name'] + '-ssn')
+ dlab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'],
+ ssn_hostname)
+ dlab.fab.append_result("Edge node is unavailable")
+ sys.exit(1)
- print('Generating infrastructure names and tags')
- data_engine = dict()
- data_engine['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
- data_engine['edge_user_name'] = (os.environ['edge_user_name']).lower().replace('_', '-')
- data_engine['project_name'] = (os.environ['project_name']).lower().replace('_', '-')
- data_engine['project_tag'] = (os.environ['project_name']).lower().replace('_', '-')
- data_engine['endpoint_tag'] = (os.environ['endpoint_name']).lower().replace('_', '-')
- data_engine['region'] = os.environ['gcp_region']
- data_engine['zone'] = os.environ['gcp_zone']
- data_engine['endpoint_name'] = os.environ['endpoint_name']
-
- edge_status = GCPMeta().get_instance_status('{0}-{1}-{2}-edge'.format(data_engine['service_base_name'],
- data_engine['project_name'],
- data_engine['endpoint_name']))
- if edge_status != 'RUNNING':
- logging.info('ERROR: Edge node is unavailable! Aborting...')
- print('ERROR: Edge node is unavailable! Aborting...')
- ssn_hostname = GCPMeta().get_private_ip_address(data_engine['service_base_name'] + '-ssn')
- put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'],
- ssn_hostname)
- append_result("Edge node is unavailable")
- sys.exit(1)
+ try:
+ if os.environ['gcp_vpc_name'] == '':
+ raise KeyError
+ else:
+ data_engine['vpc_name'] = os.environ['gcp_vpc_name']
+ except KeyError:
+ data_engine['vpc_name'] = '{}-vpc'.format(data_engine['service_base_name'])
+ if 'exploratory_name' in os.environ:
+ data_engine['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
+ else:
+ data_engine['exploratory_name'] = ''
+ if 'computational_name' in os.environ:
+ data_engine['computational_name'] = os.environ['computational_name'].replace('_', '-').lower()
+ else:
+ data_engine['computational_name'] = ''
+
+ data_engine['subnet_name'] = '{0}-{1}-{2}-subnet'.format(data_engine['service_base_name'],
+ data_engine['project_name'],
+ data_engine['endpoint_name'])
+ data_engine['master_size'] = os.environ['gcp_dataengine_master_size']
+ data_engine['slave_size'] = os.environ['gcp_dataengine_slave_size']
+ data_engine['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
+ data_engine['dataengine_service_account_name'] = '{}-{}-{}-ps-sa'.format(data_engine['service_base_name'],
+ data_engine['project_name'],
+ data_engine['endpoint_name'])
+
+ if os.environ['conf_os_family'] == 'debian':
+ initial_user = 'ubuntu'
+ sudo_group = 'sudo'
+ if os.environ['conf_os_family'] == 'redhat':
+ initial_user = 'ec2-user'
+ sudo_group = 'wheel'
+ data_engine['cluster_name'] = "{}-{}-{}-de-{}".format(data_engine['service_base_name'],
+ data_engine['project_name'],
+ data_engine['endpoint_name'],
+ data_engine['computational_name'])
+ data_engine['master_node_name'] = data_engine['cluster_name'] + '-m'
+ data_engine['slave_node_name'] = data_engine['cluster_name'] + '-s'
+ data_engine['instance_count'] = int(os.environ['dataengine_instance_count'])
+ data_engine['notebook_name'] = os.environ['notebook_instance_name']
+
+ data_engine['primary_disk_size'] = '30'
+ data_engine['secondary_disk_size'] = os.environ['notebook_disk_size']
+
+ data_engine['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+ if data_engine['shared_image_enabled'] == 'false':
+ data_engine['expected_primary_image_name'] = '{}-{}-{}-{}-primary-image'.format(
+ data_engine['service_base_name'], data_engine['project_name'], data_engine['endpoint_tag'],
+ os.environ['application'])
+ data_engine['expected_secondary_image_name'] = '{}-{}-{}-{}-secondary-image'.format(
+ data_engine['service_base_name'], data_engine['project_name'], data_engine['endpoint_tag'],
+ os.environ['application'])
+ else:
+ data_engine['expected_primary_image_name'] = '{}-{}-{}-primary-image'.format(
+ data_engine['service_base_name'], data_engine['endpoint_tag'], os.environ['application'])
+ data_engine['expected_secondary_image_name'] = '{}-{}-{}-secondary-image'.format(
+ data_engine['service_base_name'], data_engine['endpoint_tag'], os.environ['application'])
+ data_engine['notebook_primary_image_name'] = (lambda x: os.environ['notebook_primary_image_name'] if x != 'None'
+ else data_engine['expected_primary_image_name'])(str(os.environ.get('notebook_primary_image_name')))
+ print('Searching pre-configured images')
+ data_engine['primary_image_name'] = GCPMeta.get_image_by_name(data_engine['notebook_primary_image_name'])
+ if data_engine['primary_image_name'] == '':
+ data_engine['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
+ else:
+ print('Pre-configured primary image found. Using: {}'.format(data_engine['primary_image_name'].get('name')))
+ data_engine['primary_image_name'] = 'global/images/{}'.format(
+ data_engine['primary_image_name'].get('name'))
- try:
- if os.environ['gcp_vpc_name'] == '':
- raise KeyError
+ data_engine['secondary_image_name'] = GCPMeta.get_image_by_name(data_engine['expected_secondary_image_name'])
+ if data_engine['secondary_image_name'] == '':
+ data_engine['secondary_image_name'] = 'None'
else:
- data_engine['vpc_name'] = os.environ['gcp_vpc_name']
- except KeyError:
- data_engine['vpc_name'] = '{}-ssn-vpc'.format(data_engine['service_base_name'])
- try:
- data_engine['exploratory_name'] = (os.environ['exploratory_name']).lower().replace('_', '-')
- except:
- data_engine['exploratory_name'] = ''
- try:
- data_engine['computational_name'] = os.environ['computational_name'].lower().replace('_', '-')
- except:
- data_engine['computational_name'] = ''
-
- data_engine['subnet_name'] = '{0}-{1}-subnet'.format(data_engine['service_base_name'],
- data_engine['project_name'])
- data_engine['master_size'] = os.environ['gcp_dataengine_master_size']
- data_engine['slave_size'] = os.environ['gcp_dataengine_slave_size']
- data_engine['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
- data_engine['dataengine_service_account_name'] = '{}-{}-ps'.format(data_engine['service_base_name'],
- data_engine['project_name'])
-
- if os.environ['conf_os_family'] == 'debian':
- initial_user = 'ubuntu'
- sudo_group = 'sudo'
- if os.environ['conf_os_family'] == 'redhat':
- initial_user = 'ec2-user'
- sudo_group = 'wheel'
- data_engine['cluster_name'] = data_engine['service_base_name'] + '-' + data_engine['project_name'] + \
- '-de-' + data_engine['exploratory_name'] + '-' + \
- data_engine['computational_name']
- data_engine['master_node_name'] = data_engine['cluster_name'] + '-m'
- data_engine['slave_node_name'] = data_engine['cluster_name'] + '-s'
- data_engine['instance_count'] = int(os.environ['dataengine_instance_count'])
- data_engine['notebook_name'] = os.environ['notebook_instance_name']
-
- data_engine['primary_disk_size'] = '30'
- data_engine['secondary_disk_size'] = os.environ['notebook_disk_size']
-
- data_engine['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
- if data_engine['shared_image_enabled'] == 'false':
- data_engine['expected_primary_image_name'] = '{}-{}-{}-{}-primary-image'.format(
- data_engine['service_base_name'], data_engine['endpoint_tag'], data_engine['project_name'],
- os.environ['application'])
- data_engine['expected_secondary_image_name'] = '{}-{}-{}-{}-secondary-image'.format(
- data_engine['service_base_name'], data_engine['endpoint_tag'], data_engine['project_name'],
- os.environ['application'])
- else:
- data_engine['expected_primary_image_name'] = '{}-{}-{}-primary-image'.format(
- data_engine['service_base_name'], data_engine['endpoint_tag'], os.environ['application'])
- data_engine['expected_secondary_image_name'] = '{}-{}-{}-secondary-image'.format(
- data_engine['service_base_name'], data_engine['endpoint_tag'], os.environ['application'])
- data_engine['notebook_primary_image_name'] = (lambda x: os.environ['notebook_primary_image_name'] if x != 'None'
- else data_engine['expected_primary_image_name'])(str(os.environ.get('notebook_primary_image_name')))
- print('Searching pre-configured images')
- data_engine['primary_image_name'] = GCPMeta().get_image_by_name(data_engine['notebook_primary_image_name'])
- if data_engine['primary_image_name'] == '':
- data_engine['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
- else:
- print('Pre-configured primary image found. Using: {}'.format(data_engine['primary_image_name'].get('name')))
- data_engine['primary_image_name'] = 'global/images/{}'.format(
- data_engine['primary_image_name'].get('name'))
-
- data_engine['secondary_image_name'] = GCPMeta().get_image_by_name(data_engine['expected_secondary_image_name'])
- if data_engine['secondary_image_name'] == '':
- data_engine['secondary_image_name'] = 'None'
- else:
- print('Pre-configured secondary image found. Using: {}'.format(data_engine['secondary_image_name'].get('name')))
- data_engine['secondary_image_name'] = 'global/images/{}'.format(data_engine['secondary_image_name'].get('name'))
-
- with open('/root/result.json', 'w') as f:
- data = {"hostname": data_engine['cluster_name'], "error": ""}
- json.dump(data, f)
-
- data_engine['gpu_accelerator_type'] = 'None'
- if os.environ['application'] in ('tensor', 'tensor-rstudio', 'deeplearning'):
- data_engine['gpu_accelerator_type'] = os.environ['gcp_gpu_accelerator_type']
- data_engine['network_tag'] = '{0}-{1}-ps'.format(data_engine['service_base_name'],
- data_engine['project_name'])
-
- additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "" ).replace("'}", "")
-
- data_engine['slave_labels'] = {"name": data_engine['cluster_name'],
- "sbn": data_engine['service_base_name'],
- "type": "slave",
- "notebook_name": data_engine['notebook_name'],
- "product": "dlab"}
- data_engine['master_labels'] = {"name": data_engine['cluster_name'],
- "sbn": data_engine['service_base_name'],
- "type": "master",
- "notebook_name": data_engine['notebook_name'],
- "product": "dlab"}
-
- for tag in additional_tags.split(','):
- label_key = tag.split(':')[0]
- label_value = tag.split(':')[1]
- if label_key == 'user_tag':
- if '@' in label_value:
- label_value = label_value[:label_value.find('@')]
- if label_value != '':
- data_engine['slave_labels'].update({label_key: label_value})
- data_engine['master_labels'].update({label_key: label_value})
+ print('Pre-configured secondary image found. Using: {}'.format(
+ data_engine['secondary_image_name'].get('name')))
+ data_engine['secondary_image_name'] = 'global/images/{}'.format(
+ data_engine['secondary_image_name'].get('name'))
+
+ with open('/root/result.json', 'w') as f:
+ data = {"hostname": data_engine['cluster_name'], "error": ""}
+ json.dump(data, f)
+
+ data_engine['gpu_accelerator_type'] = 'None'
+ if os.environ['application'] in ('tensor', 'tensor-rstudio', 'deeplearning'):
+ data_engine['gpu_accelerator_type'] = os.environ['gcp_gpu_accelerator_type']
+ data_engine['network_tag'] = '{0}-{1}-{2}-ps'.format(data_engine['service_base_name'],
+ data_engine['project_name'], data_engine['endpoint_name'])
- additional_tags = json.loads(
- os.environ['tags'].replace("': u'", "\": \"").replace("', u'", "\", \"").replace(
- "{u'", "{\"").replace("'}", "\"}"))
++ additional_tags = os.environ['tags'].replace("': u'", ":").replace("', u'", ",").replace("{u'", "").replace(
++ "'}", "")
+
- if '@' in additional_tags['user_tag']:
- data_engine['user_tag'] = additional_tags['user_tag'][:additional_tags['user_tag'].find('@')]
- else:
- data_engine['user_tag'] = additional_tags['user_tag']
-
- data_engine['custom_tag'] = additional_tags['custom_tag']
+ data_engine['slave_labels'] = {"name": data_engine['cluster_name'],
+ "sbn": data_engine['service_base_name'],
- "user": data_engine['edge_user_name'],
- "project_tag": data_engine['project_tag'],
- "endpoint_tag": data_engine['endpoint_tag'],
+ "type": "slave",
+ "notebook_name": data_engine['notebook_name'],
+ "product": "dlab"}
+ data_engine['master_labels'] = {"name": data_engine['cluster_name'],
+ "sbn": data_engine['service_base_name'],
- "user": data_engine['edge_user_name'],
- "project_tag": data_engine['project_tag'],
- "endpoint_tag": data_engine['endpoint_tag'],
+ "type": "master",
+ "notebook_name": data_engine['notebook_name'],
+ "product": "dlab"}
- if data_engine['custom_tag'] != '':
- data_engine['slave_labels'].update({'custom_tag': data_engine['custom_tag']})
- data_engine['master_labels'].update({'custom_tag': data_engine['custom_tag']})
++
++ for tag in additional_tags.split(','):
++ label_key = tag.split(':')[0]
++ label_value = tag.split(':')[1]
++ if label_key == 'user_tag':
++ if '@' in label_value:
++ label_value = label_value[:label_value.find('@')]
++ if label_value != '':
++ data_engine['slave_labels'].update({label_key: label_value})
++ data_engine['master_labels'].update({label_key: label_value})
+ except Exception as err:
+ dlab.fab.append_result("Failed to generate variables dictionary. Exception:" + str(err))
+ sys.exit(1)
try:
logging.info('[CREATE MASTER NODE]')
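
Finally, every prepare script touched by this merge wraps the whole variable-generation block in one try/except that reports the failure through dlab.fab.append_result and exits with a non-zero status instead of letting the exception escape. A rough sketch of that structure, assuming it runs inside the DLab provisioning environment where the dlab package is importable; the body shown is a placeholder.

    import os
    import sys
    import dlab.fab

    try:
        # ... build the configuration dictionary (names, labels, image names, ...)
        data_engine = dict()
        data_engine['service_base_name'] = os.environ['conf_service_base_name']
    except Exception as err:
        dlab.fab.append_result("Failed to generate variables dictionary. Exception:" + str(err))
        sys.exit(1)
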
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@dlab.apache.org
For additional commands, e-mail: commits-help@dlab.apache.org