You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@dlab.apache.org by ad...@apache.org on 2019/09/10 12:43:02 UTC
[incubator-dlab] 01/01: [DLAB-1090] Include EMR disk name: use
cluster_name instead of instance_name on GCP.
This is an automated email from the ASF dual-hosted git repository.
adamsd pushed a commit to branch DLAB-1090
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git
commit a555c58980d434d74decd7d2c1fc454681f1b154
Author: AdamsDisturber <ad...@gmail.com>
AuthorDate: Tue Sep 10 15:42:41 2019 +0300
[DLAB-1090] Include EMR disk name: use cluster_name instead of instance_name on GCP.
---
.../src/general/lib/gcp/actions_lib.py | 19 ++++++++++++++++---
.../src/general/scripts/gcp/common_create_instance.py | 4 +++-
.../src/general/scripts/gcp/dataengine_prepare.py | 13 +++++++++----
3 files changed, 28 insertions(+), 8 deletions(-)
diff --git a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
index 6b405a6..f6c14e2 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
@@ -265,10 +265,10 @@ class GCPActions:
file=sys.stdout)}))
traceback.print_exc(file=sys.stdout)
- def create_instance(self, instance_name, region, zone, vpc_name, subnet_name, instance_size,
+ def create_instance(self, instance_name, cluster_name, region, zone, vpc_name, subnet_name, instance_size,
ssh_key_path,
- initial_user, image_name, secondary_image_name, service_account_name, instance_class, network_tag,
- labels, static_ip='',
+ initial_user, image_name, secondary_image_name, service_account_name, instance_class,
+ network_tag, labels, static_ip='',
primary_disk_size='12', secondary_disk_size='30',
gpu_accelerator_type='None'):
key = RSA.importKey(open(ssh_key_path, 'rb').read())
@@ -312,6 +312,19 @@ class GCPActions:
instance_name)
}
]
+ elif instance_class == 'dataengine':
+ disks = [{
+ "name": instance_name,
+ "tag_name": instance_name + '-volume-primary',
+ "deviceName": cluster_name + '-primary',
+ "autoDelete": 'true',
+ "initializeParams": {
+ "diskSizeGb": primary_disk_size,
+ "sourceImage": image_name
+ },
+ "boot": 'true',
+ "mode": "READ_WRITE"
+ }]
else:
disks = [{
"name": instance_name,
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py
index 175911e..25b39ae 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py
@@ -47,6 +47,7 @@ parser.add_argument('--static_ip', type=str, default='')
parser.add_argument('--labels', type=str, default='{"empty":"string"}')
parser.add_argument('--gpu_accelerator_type', type=str, default='None')
parser.add_argument('--network_tag', type=str, default='')
+parser.add_argument('--cluster_name', type=str, default='')
args = parser.parse_args()
@@ -56,7 +57,8 @@ if __name__ == "__main__":
print("REQUESTED INSTANCE {} ALREADY EXISTS".format(args.instance_name))
else:
print("Creating Instance {}".format(args.instance_name))
- GCPActions().create_instance(args.instance_name, args.region, args.zone, args.vpc_name, args.subnet_name,
+ GCPActions().create_instance(args.instance_name, args.cluster_name, args.region, args.zone,
+ args.vpc_name, args.subnet_name,
args.instance_size, args.ssh_key_path, args.initial_user, args.image_name,
args.secondary_image_name, args.service_account_name, args.instance_class,
args.network_tag, json.loads(args.labels), args.static_ip,
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
index 72fbc8e..2a11761 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
@@ -123,11 +123,13 @@ if __name__ == "__main__":
try:
logging.info('[CREATE MASTER NODE]')
print('[CREATE MASTER NODE]')
- params = "--instance_name {} --region {} --zone {} --vpc_name {} --subnet_name {} --instance_size {} --ssh_key_path {} --initial_user {} --service_account_name {} --image_name {} --instance_class {} --primary_disk_size {} --gpu_accelerator_type {} --network_tag {} --labels '{}'".\
+ params = "--instance_name {} --region {} --zone {} --vpc_name {} --subnet_name {} --instance_size {} " \
+ "--ssh_key_path {} --initial_user {} --service_account_name {} --image_name {} --instance_class {} " \
+ "--primary_disk_size {} --gpu_accelerator_type {} --network_tag {} --cluster_name {} --labels '{}'".\
format(data_engine['master_node_name'], data_engine['region'], data_engine['zone'], data_engine['vpc_name'],
data_engine['subnet_name'], data_engine['master_size'], data_engine['ssh_key_path'], initial_user,
data_engine['dataengine_service_account_name'], data_engine['image_name'], 'dataengine', '30',
- data_engine['gpu_accelerator_type'], data_engine['network_tag'],
+ data_engine['gpu_accelerator_type'], data_engine['network_tag'], data_engine['cluster_name'],
json.dumps(data_engine['master_labels']))
try:
local("~/scripts/{}.py {}".format('common_create_instance', params))
@@ -145,12 +147,15 @@ if __name__ == "__main__":
logging.info('[CREATE SLAVE NODE {}]'.format(i + 1))
print('[CREATE SLAVE NODE {}]'.format(i + 1))
slave_name = data_engine['slave_node_name'] + '{}'.format(i + 1)
- params = "--instance_name {} --region {} --zone {} --vpc_name {} --subnet_name {} --instance_size {} --ssh_key_path {} --initial_user {} --service_account_name {} --image_name {} --instance_class {} --primary_disk_size {} --gpu_accelerator_type {} --network_tag {} --labels '{}'". \
+ params = "--instance_name {} --region {} --zone {} --vpc_name {} --subnet_name {} --instance_size {} " \
+ "--ssh_key_path {} --initial_user {} --service_account_name {} --image_name {} " \
+ "--instance_class {} --primary_disk_size {} " \
+ "--gpu_accelerator_type {} --network_tag {} --cluster_name {} --labels '{}'". \
format(slave_name, data_engine['region'], data_engine['zone'],
data_engine['vpc_name'], data_engine['subnet_name'], data_engine['slave_size'],
data_engine['ssh_key_path'], initial_user, data_engine['dataengine_service_account_name'],
data_engine['image_name'], 'dataengine', '30', data_engine['gpu_accelerator_type'],
- data_engine['network_tag'], json.dumps(data_engine['slave_labels']))
+ data_engine['network_tag'], data_engine['cluster_name'], json.dumps(data_engine['slave_labels']))
try:
local("~/scripts/{}.py {}".format('common_create_instance', params))
except:
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@dlab.apache.org
For additional commands, e-mail: commits-help@dlab.apache.org