Posted to commits@dlab.apache.org by my...@apache.org on 2020/01/29 13:01:05 UTC

[incubator-dlab] branch DLAB-1487 created (now f8de1c2)

This is an automated email from the ASF dual-hosted git repository.

mykolabodnar pushed a change to branch DLAB-1487
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git.


      at f8de1c2  [DLAB-1487]: [Azure] Shared endpoint bucket should be created during project creation

This branch includes the following new commits:

     new f6ad61d  [DLAB-1487]: [AWS] Shared endpoint bucket should be created during project creation
     new f8de1c2  [DLAB-1487]: [Azure] Shared endpoint bucket should be created during project creation

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@dlab.apache.org
For additional commands, e-mail: commits-help@dlab.apache.org


[incubator-dlab] 01/02: [DLAB-1487]: [AWS] Shared endpoint bucket should be created during project creation

Posted by my...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mykolabodnar pushed a commit to branch DLAB-1487
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git

commit f6ad61da70ea835d3b6cd2100f93b3ea2b8b2a4c
Author: Mykola_Bodnar1 <bo...@gmail.com>
AuthorDate: Wed Jan 29 12:25:03 2020 +0200

    [DLAB-1487]: [AWS] Shared endpoint bucket should be created during project creation
---
 .../general/scripts/aws/common_create_policy.py    |  3 --
 .../src/general/scripts/aws/project_prepare.py     | 22 +++++++----
 .../src/general/scripts/aws/ssn_prepare.py         | 46 ----------------------
 3 files changed, 14 insertions(+), 57 deletions(-)
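
For readers skimming the diff, this is roughly what the AWS change boils down to: the per-endpoint shared bucket is now named and created in project_prepare.py instead of ssn_prepare.py. A minimal sketch, assuming Fabric's local() helper (which these provisioning scripts already use) and placeholder values in place of the real project_conf entries:

    # Minimal sketch of the shared-bucket step now performed in project_prepare.py.
    # All literal values below are illustrative placeholders, not DLab defaults.
    from fabric.api import local  # the same shell helper the provisioning scripts use

    service_base_name = 'dlab-sbn'         # placeholder
    endpoint_tag = 'endpoint1'             # placeholder
    region = 'us-west-2'                   # placeholder
    tag_name = service_base_name + '-Tag'  # naming convention from ssn_prepare.py

    # The shared bucket is named per endpoint, so each endpoint gets its own bucket
    shared_bucket_name_tag = '{0}-{1}-shared-bucket'.format(service_base_name, endpoint_tag)
    shared_bucket_name = shared_bucket_name_tag.lower().replace('_', '-')

    params = "--bucket_name {} --infra_tag_name {} --infra_tag_value {} " \
             "--region {} --bucket_name_tag {}".format(shared_bucket_name, tag_name,
                                                       shared_bucket_name, region,
                                                       shared_bucket_name_tag)
    local("~/scripts/common_create_bucket.py {}".format(params))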

diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py b/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py
index 3cad721..d7f5ee0 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py
@@ -29,7 +29,6 @@ import boto3, botocore
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--bucket_name', type=str, default='')
-parser.add_argument('--ssn_bucket_name', type=str, default='')
 parser.add_argument('--shared_bucket_name', type=str, default='')
 parser.add_argument('--service_base_name', type=str, default='')
 parser.add_argument('--username', type=str, default='')
@@ -46,8 +45,6 @@ if __name__ == "__main__":
             handler = open('/root/templates/edge_s3_policy.json', 'r')
             policy = handler.read()
             policy = policy.replace('BUCKET_NAME', args.bucket_name)
-            # Removed for multiple Endpoints per project
-            # policy = policy.replace('SSN_BUCK', args.ssn_bucket_name)
             policy = policy.replace('SHARED_BUCK', args.shared_bucket_name)
             if args.region == 'cn-north-1':
                 policy = policy.replace('aws', 'aws-cn')
diff --git a/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
index 8de0370..019eace 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
@@ -61,10 +61,9 @@ if __name__ == "__main__":
                                                                   project_conf['project_name'],
                                                                   project_conf['endpoint_name'])
     project_conf['bucket_name'] = project_conf['bucket_name_tag'].lower().replace('_', '-')
-    project_conf['ssn_bucket_name'] = '{}-ssn-bucket'.format(
-        project_conf['service_base_name']).lower().replace('_', '-')
-    project_conf['shared_bucket_name'] = '{0}-{1}-shared-bucket'.format(
-        project_conf['service_base_name'], project_conf['endpoint_tag']).lower().replace('_', '-')
+    project_conf['shared_bucket_name_tag'] = '{0}-{1}-shared-bucket'.format(
+        project_conf['service_base_name'], project_conf['endpoint_tag'])
+    project_conf['shared_bucket_name'] = project_conf['shared_bucket_name_tag'].lower().replace('_', '-')
     project_conf['edge_role_name'] = '{}-{}-edge-Role'.format(
         project_conf['service_base_name'].lower().replace('-', '_'), os.environ['project_name'])
     project_conf['edge_role_profile_name'] = '{}-{}-edge-Profile'.format(
@@ -521,7 +520,7 @@ if __name__ == "__main__":
             raise Exception
     except Exception as err:
         print('Error: {0}'.format(err))
-        append_result("Failed to create bucket.", str(err))
+        append_result("Failed to create security group.", str(err))
         remove_all_iam_resources('notebook', os.environ['project_name'])
         remove_all_iam_resources('edge', os.environ['project_name'])
         remove_sgroups(project_conf['dataengine_instances_name'])
@@ -532,6 +531,14 @@ if __name__ == "__main__":
     try:
         logging.info('[CREATE BUCKETS]')
         print('[CREATE BUCKETS]')
+        params = "--bucket_name {} --infra_tag_name {} --infra_tag_value {} --region {} --bucket_name_tag {}". \
+            format(project_conf['shared_bucket_name'], project_conf['tag_name'], project_conf['shared_bucket_name'], project_conf['region'], project_conf['shared_bucket_name_tag'])
+        try:
+            local("~/scripts/{}.py {}".format('common_create_bucket', params))
+        except:
+            traceback.print_exc()
+            raise Exception
+
         params = "--bucket_name {} --infra_tag_name {} --infra_tag_value {} --region {} --bucket_name_tag {}" \
                  .format(project_conf['bucket_name'], project_conf['tag_name'], project_conf['bucket_name'],
                          project_conf['region'], project_conf['bucket_name_tag'])
@@ -542,7 +549,7 @@ if __name__ == "__main__":
             raise Exception
     except Exception as err:
         print('Error: {0}'.format(err))
-        append_result("Failed to create bucket.", str(err))
+        append_result("Failed to create buckets.", str(err))
         remove_all_iam_resources('notebook', os.environ['project_name'])
         remove_all_iam_resources('edge', os.environ['project_name'])
         remove_sgroups(project_conf['dataengine_instances_name'])
@@ -553,10 +560,9 @@ if __name__ == "__main__":
     try:
         logging.info('[CREATING BUCKET POLICY FOR PROJECT INSTANCES]')
         print('[CREATING BUCKET POLICY FOR USER INSTANCES]')
-        params = '--bucket_name {} --ssn_bucket_name {} --shared_bucket_name {} --username {} --edge_role_name {} ' \
+        params = '--bucket_name {} --shared_bucket_name {} --username {} --edge_role_name {} ' \
                  '--notebook_role_name {} --service_base_name {} --region {} ' \
                  '--user_predefined_s3_policies "{}"'.format(project_conf['bucket_name'],
-                                                             project_conf['ssn_bucket_name'],
                                                              project_conf['shared_bucket_name'],
                                                              os.environ['project_name'], project_conf['edge_role_name'],
                                                              project_conf['notebook_dataengine_role_name'],
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py
index 9acf264..8615a25 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py
@@ -61,11 +61,7 @@ if __name__ == "__main__":
         role_name = service_base_name.lower().replace('-', '_') + '-ssn-Role'
         role_profile_name = service_base_name.lower().replace('-', '_') + '-ssn-Profile'
         policy_name = service_base_name.lower().replace('-', '_') + '-ssn-Policy'
-        ssn_bucket_name_tag = service_base_name + '-ssn-bucket'
         default_endpoint_name = os.environ['default_endpoint_name']
-        shared_bucket_name_tag = '{0}-{1}-shared-bucket'.format(service_base_name, default_endpoint_name)
-        ssn_bucket_name = ssn_bucket_name_tag.lower().replace('_', '-')
-        shared_bucket_name = shared_bucket_name_tag.lower().replace('_', '-')
         tag_name = service_base_name + '-Tag'
         tag2_name = service_base_name + '-secondary-Tag'
         user_tag = "{0}:{0}-ssn-Role".format(service_base_name)
@@ -386,48 +382,6 @@ if __name__ == "__main__":
                 remove_route_tables(tag2_name, True)
                 remove_vpc(os.environ['aws_vpc2_id'])
             sys.exit(1)
-    try:
-        logging.info('[CREATE BUCKETS]')
-        print('[CREATE BUCKETS]')
-        params = "--bucket_name {} --infra_tag_name {} --infra_tag_value {} --region {} --bucket_name_tag {}". \
-                 format(ssn_bucket_name, tag_name, ssn_bucket_name, region, ssn_bucket_name_tag)
-
-        try:
-            local("~/scripts/{}.py {}".format('common_create_bucket', params))
-        except:
-            traceback.print_exc()
-            raise Exception
-
-        params = "--bucket_name {} --infra_tag_name {} --infra_tag_value {} --region {} --bucket_name_tag {}". \
-                 format(shared_bucket_name, tag_name, shared_bucket_name, region, shared_bucket_name_tag)
-
-        try:
-            local("~/scripts/{}.py {}".format('common_create_bucket', params))
-        except:
-            traceback.print_exc()
-            raise Exception
-    except Exception as err:
-        print('Error: {0}'.format(err))
-        append_result("Unable to create bucket.", str(err))
-        remove_all_iam_resources(instance)
-        if pre_defined_sg:
-            remove_sgroups(tag_name)
-        if pre_defined_subnet:
-            remove_internet_gateways(os.environ['aws_vpc_id'], tag_name, service_base_name)
-            remove_subnets(service_base_name + "-subnet")
-        if pre_defined_vpc:
-            remove_vpc_endpoints(os.environ['aws_vpc_id'])
-            remove_route_tables(tag_name, True)
-            remove_vpc(os.environ['aws_vpc_id'])
-        if pre_defined_vpc2:
-            remove_peering('*')
-            try:
-                remove_vpc_endpoints(os.environ['aws_vpc2_id'])
-            except:
-                print("There are no VPC Endpoints")
-            remove_route_tables(tag2_name, True)
-            remove_vpc(os.environ['aws_vpc2_id'])
-        sys.exit(1)
 
     try:
         logging.info('[CREATE SSN INSTANCE]')
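
The companion change in common_create_policy.py drops the SSN bucket from the edge policy template, so only the project bucket and the shared endpoint bucket placeholders are substituted. A hedged sketch of that substitution (the template path is the one from the script; the bucket names are placeholders):

    # Sketch of the edge policy templating after this change; SSN_BUCK is no
    # longer replaced because the --ssn_bucket_name argument was removed.
    bucket_name = 'dlab-sbn-project1-endpoint1-bucket'        # placeholder
    shared_bucket_name = 'dlab-sbn-endpoint1-shared-bucket'   # placeholder
    region = 'us-west-2'                                      # placeholder

    with open('/root/templates/edge_s3_policy.json', 'r') as handler:
        policy = handler.read()
    policy = policy.replace('BUCKET_NAME', bucket_name)
    policy = policy.replace('SHARED_BUCK', shared_bucket_name)
    if region == 'cn-north-1':
        policy = policy.replace('aws', 'aws-cn')  # China partition ARNs, as in the script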




[incubator-dlab] 02/02: [DLAB-1487]: [Azure] Shared endpoint bucket should be created during project creation

Posted by my...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mykolabodnar pushed a commit to branch DLAB-1487
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git

commit f8de1c2bfce89ddcd95cfe4179b3d14165d4c71c
Author: Mykola_Bodnar1 <bo...@gmail.com>
AuthorDate: Wed Jan 29 15:00:34 2020 +0200

    [DLAB-1487]: [Azure] Shared endpoint bucket should be created during project creation
---
 .../src/general/scripts/azure/project_prepare.py   | 39 ++++++++++++
 .../src/general/scripts/azure/ssn_prepare.py       | 73 +---------------------
 2 files changed, 40 insertions(+), 72 deletions(-)
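
As with the AWS commit, the gist of this change is that the shared storage account and container move from ssn_prepare.py to project_prepare.py. A minimal sketch under the same assumptions (Fabric's local() helper, placeholder values instead of the real configuration):

    # Sketch of the shared-storage step now performed in azure/project_prepare.py.
    # All literal values below are illustrative placeholders.
    import json
    from fabric.api import local

    service_base_name = 'dlab-sbn'        # placeholder
    default_endpoint_name = 'endpoint1'   # placeholder
    resource_group_name = 'dlab-rg'       # placeholder
    region = 'westus2'                    # placeholder

    shared_storage_account_name = '{0}-{1}-shared-storage'.format(service_base_name,
                                                                  default_endpoint_name)
    shared_container_name = '{}-shared-container'.format(service_base_name).lower()
    shared_storage_account_tags = {"Name": shared_storage_account_name,
                                   "SBN": service_base_name}  # billing tag omitted in this sketch

    params = "--container_name {} --account_tags '{}' --resource_group_name {} --region {}".format(
        shared_container_name, json.dumps(shared_storage_account_tags),
        resource_group_name, region)
    local("~/scripts/common_create_storage_account.py {}".format(params))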

diff --git a/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
index df4cfae..c485ff9 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
@@ -97,6 +97,14 @@ if __name__ == "__main__":
                                                 "endpoint_tag": project_conf['endpoint_tag'],
                                                 os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
         project_conf['primary_disk_size'] = '32'
+        project_conf['default_endpoint_name'] = os.environ['default_endpoint_name']
+        project_conf['shared_storage_account_name'] = '{0}-{1}-shared-storage'.format(project_conf['service_base_name'],
+                                                                                  project_conf['default_endpoint_name'])
+        project_conf['shared_container_name'] = '{}-shared-container'.format(project_conf['service_base_name']).lower()
+        project_conf['shared_storage_account_tags'] = {"Name": project_conf['shared_storage_account_name'],
+                                                   "SBN": project_conf['service_base_name'],
+                                                   os.environ['conf_billing_tag_key']: os.environ[
+                                                       'conf_billing_tag_value']}
 
         # FUSE in case of absence of user's key
         try:
@@ -672,6 +680,31 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
+        logging.info('[CREATE SHARED STORAGE ACCOUNT AND CONTAINER]')
+        print('[CREATE SHARED STORAGE ACCOUNT AND CONTAINER]')
+        params = "--container_name {} --account_tags '{}' --resource_group_name {} --region {}". \
+            format(project_conf['shared_container_name'], json.dumps(project_conf['shared_storage_account_tags']),
+                   project_conf['resource_group_name'], project_conf['region'])
+        local("~/scripts/{}.py {}".format('common_create_storage_account', params))
+    except Exception as err:
+        print('Error: {0}'.format(err))
+        append_result("Failed to create storage account.", str(err))
+        AzureActions().remove_subnet(project_conf['resource_group_name'], project_conf['vpc_name'],
+                                     project_conf['private_subnet_name'])
+        AzureActions().remove_security_group(project_conf['resource_group_name'],
+                                             project_conf['edge_security_group_name'])
+        AzureActions().remove_security_group(project_conf['resource_group_name'],
+                                             project_conf['notebook_security_group_name'])
+        AzureActions().remove_security_group(project_conf['resource_group_name'],
+                                             project_conf['master_security_group_name'])
+        AzureActions().remove_security_group(project_conf['resource_group_name'],
+                                             project_conf['slave_security_group_name'])
+        for storage_account in AzureMeta().list_storage_accounts(project_conf['resource_group_name']):
+            if project_conf['shared_storage_account_name'] == storage_account.tags["Name"]:
+                AzureActions().remove_storage_account(project_conf['resource_group_name'], storage_account.name)
+        sys.exit(1)
+
+    try:
         logging.info('[CREATE STORAGE ACCOUNT AND CONTAINERS]')
         print('[CREATE STORAGE ACCOUNT AND CONTAINERS]')
 
@@ -697,6 +730,8 @@ if __name__ == "__main__":
         for storage_account in AzureMeta().list_storage_accounts(project_conf['resource_group_name']):
             if project_conf['edge_storage_account_name'] == storage_account.tags["Name"]:
                 AzureActions().remove_storage_account(project_conf['resource_group_name'], storage_account.name)
+            if project_conf['shared_storage_account_name'] == storage_account.tags["Name"]:
+                AzureActions().remove_storage_account(project_conf['resource_group_name'], storage_account.name)
         sys.exit(1)
 
     if os.environ['azure_datalake_enable'] == 'true':
@@ -726,6 +761,8 @@ if __name__ == "__main__":
             for storage_account in AzureMeta().list_storage_accounts(project_conf['resource_group_name']):
                 if project_conf['edge_storage_account_name'] == storage_account.tags["Name"]:
                     AzureActions().remove_storage_account(project_conf['resource_group_name'], storage_account.name)
+                if project_conf['shared_storage_account_name'] == storage_account.tags["Name"]:
+                    AzureActions().remove_storage_account(project_conf['resource_group_name'], storage_account.name)
             try:
                 for datalake in AzureMeta().list_datalakes(project_conf['resource_group_name']):
                     if project_conf['datalake_store_name'] == datalake.tags["Name"]:
@@ -777,6 +814,8 @@ if __name__ == "__main__":
         for storage_account in AzureMeta().list_storage_accounts(project_conf['resource_group_name']):
             if project_conf['edge_storage_account_name'] == storage_account.tags["Name"]:
                 AzureActions().remove_storage_account(project_conf['resource_group_name'], storage_account.name)
+            if project_conf['shared_storage_account_name'] == storage_account.tags["Name"]:
+                AzureActions().remove_storage_account(project_conf['resource_group_name'], storage_account.name)
         if os.environ['azure_datalake_enable'] == 'true':
             for datalake in AzureMeta().list_datalakes(project_conf['resource_group_name']):
                 if project_conf['datalake_store_name'] == datalake.tags["Name"]:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
index e9dff4d..bb6c793 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
@@ -62,12 +62,6 @@ if __name__ == "__main__":
         ssn_conf['vpc_cidr'] = os.environ['conf_vpc_cidr']
         ssn_conf['subnet_prefix'] = '20'
         ssn_conf['ssn_image_name'] = os.environ['azure_{}_image_name'.format(os.environ['conf_os_family'])]
-        ssn_conf['ssn_storage_account_name'] = '{}-ssn-storage'.format(ssn_conf['service_base_name'])
-        ssn_conf['ssn_container_name'] = '{}-ssn-container'.format(ssn_conf['service_base_name']).lower()
-        ssn_conf['default_endpoint_name'] = os.environ['default_endpoint_name']
-        ssn_conf['shared_storage_account_name'] = '{0}-{1}-shared-storage'.format(ssn_conf['service_base_name'],
-                                                                                  ssn_conf['default_endpoint_name'])
-        ssn_conf['shared_container_name'] = '{}-shared-container'.format(ssn_conf['service_base_name']).lower()
         ssn_conf['datalake_store_name'] = '{}-ssn-datalake'.format(ssn_conf['service_base_name'])
         ssn_conf['datalake_shared_directory_name'] = '{}-shared-folder'.format(ssn_conf['service_base_name'])
         ssn_conf['instance_name'] = '{}-ssn'.format(ssn_conf['service_base_name'])
@@ -82,12 +76,7 @@ if __name__ == "__main__":
         ssn_conf['instance_tags'] = {"Name": ssn_conf['instance_name'],
                                      "SBN": ssn_conf['service_base_name'],
                                      os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
-        ssn_conf['ssn_storage_account_tags'] = {"Name": ssn_conf['ssn_storage_account_name'],
-                                                "SBN": ssn_conf['service_base_name'],
-                                                os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
-        ssn_conf['shared_storage_account_tags'] = {"Name": ssn_conf['shared_storage_account_name'],
-                                                   "SBN": ssn_conf['service_base_name'],
-                                                   os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
+
         ssn_conf['datalake_store_tags'] = {"Name": ssn_conf['datalake_store_name'],
                                            "SBN": ssn_conf['service_base_name'],
                                            os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
@@ -254,58 +243,6 @@ if __name__ == "__main__":
         append_result("Failed to create Security group. Exception: " + str(err))
         sys.exit(1)
 
-    try:
-        logging.info('[CREATE SSN STORAGE ACCOUNT AND CONTAINER]')
-        print('[CREATE SSN STORAGE ACCOUNT AND CONTAINER]')
-        params = "--container_name {} --account_tags '{}' --resource_group_name {} --region {}". \
-                 format(ssn_conf['ssn_container_name'], json.dumps(ssn_conf['ssn_storage_account_tags']),
-                        ssn_conf['resource_group_name'], ssn_conf['region'])
-        local("~/scripts/{}.py {}".format('common_create_storage_account', params))
-    except Exception as err:
-        traceback.print_exc()
-        print('Error: {0}'.format(err))
-        if 'azure_resource_group_name' not in os.environ:
-            AzureActions().remove_resource_group(ssn_conf['service_base_name'], ssn_conf['region'])
-        if 'azure_vpc_name' not in os.environ:
-            AzureActions().remove_vpc(ssn_conf['resource_group_name'], ssn_conf['vpc_name'])
-        if 'azure_subnet_name' not in os.environ:
-            AzureActions().remove_subnet(ssn_conf['resource_group_name'], ssn_conf['vpc_name'],
-                                            ssn_conf['subnet_name'])
-        if 'azure_security_group_name' not in os.environ:
-            AzureActions().remove_security_group(ssn_conf['resource_group_name'], ssn_conf['security_group_name'])
-        for storage_account in AzureMeta().list_storage_accounts(ssn_conf['resource_group_name']):
-            if ssn_conf['ssn_storage_account_name'] == storage_account.tags["Name"]:
-                AzureActions().remove_storage_account(ssn_conf['resource_group_name'], storage_account.name)
-        append_result("Failed to create SSN storage account and container. Exception:" + str(err))
-        sys.exit(1)
-
-    try:
-        logging.info('[CREATE SHARED STORAGE ACCOUNT AND CONTAINER]')
-        print('[CREATE SHARED STORAGE ACCOUNT AND CONTAINER]')
-        params = "--container_name {} --account_tags '{}' --resource_group_name {} --region {}". \
-            format(ssn_conf['shared_container_name'], json.dumps(ssn_conf['shared_storage_account_tags']),
-                   ssn_conf['resource_group_name'], ssn_conf['region'])
-        local("~/scripts/{}.py {}".format('common_create_storage_account', params))
-    except Exception as err:
-        traceback.print_exc()
-        print('Error: {0}'.format(err))
-        if 'azure_resource_group_name' not in os.environ:
-            AzureActions().remove_resource_group(ssn_conf['service_base_name'], ssn_conf['region'])
-        if 'azure_vpc_name' not in os.environ:
-            AzureActions().remove_vpc(ssn_conf['resource_group_name'], ssn_conf['vpc_name'])
-        if 'azure_subnet_name' not in os.environ:
-            AzureActions().remove_subnet(ssn_conf['resource_group_name'], ssn_conf['vpc_name'],
-                                            ssn_conf['subnet_name'])
-        if 'azure_security_group_name' not in os.environ:
-            AzureActions().remove_security_group(ssn_conf['resource_group_name'], ssn_conf['security_group_name'])
-        for storage_account in AzureMeta().list_storage_accounts(ssn_conf['resource_group_name']):
-            if ssn_conf['ssn_storage_account_name'] == storage_account.tags["Name"]:
-                AzureActions().remove_storage_account(ssn_conf['resource_group_name'], storage_account.name)
-            if ssn_conf['shared_storage_account_name'] == storage_account.tags["Name"]:
-                AzureActions().remove_storage_account(ssn_conf['resource_group_name'], storage_account.name)
-        append_result("Failed to create SSN storage account and container. Exception:" + str(err))
-        sys.exit(1)
-
     if os.environ['azure_datalake_enable'] == 'true':
         try:
             logging.info('[CREATE DATA LAKE STORE]')
@@ -342,9 +279,6 @@ if __name__ == "__main__":
                                                 ssn_conf['subnet_name'])
             if 'azure_security_group_name' not in os.environ:
                 AzureActions().remove_security_group(ssn_conf['resource_group_name'], ssn_conf['security_group_name'])
-            for storage_account in AzureMeta().list_storage_accounts(ssn_conf['resource_group_name']):
-                if ssn_conf['ssn_storage_account_name'] == storage_account.tags["Name"]:
-                    AzureActions().remove_storage_account(ssn_conf['resource_group_name'], storage_account.name)
             for datalake in AzureMeta().list_datalakes(ssn_conf['resource_group_name']):
                 if ssn_conf['datalake_store_name'] == datalake.tags["Name"]:
                     AzureActions().delete_datalake_store(ssn_conf['resource_group_name'], datalake.name)
@@ -384,11 +318,6 @@ if __name__ == "__main__":
                                             ssn_conf['subnet_name'])
         if 'azure_security_group_name' not in os.environ:
             AzureActions().remove_security_group(ssn_conf['resource_group_name'], ssn_conf['security_group_name'])
-        for storage_account in AzureMeta().list_storage_accounts(ssn_conf['resource_group_name']):
-            if ssn_conf['ssn_storage_account_name'] == storage_account.tags["Name"]:
-                AzureActions().remove_storage_account(ssn_conf['resource_group_name'], storage_account.name)
-            if ssn_conf['shared_storage_account_name'] == storage_account.tags["Name"]:
-                AzureActions().remove_storage_account(ssn_conf['resource_group_name'], storage_account.name)
         for datalake in AzureMeta().list_datalakes(ssn_conf['resource_group_name']):
             if ssn_conf['datalake_store_name'] == datalake.tags["Name"]:
                 AzureActions().delete_datalake_store(ssn_conf['resource_group_name'], datalake.name)
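
One design note on the rollback paths touched above: Azure storage accounts are matched by their "Name" tag rather than by resource name, so every error handler that can run after the shared account exists repeats the same removal loop. A sketch of that loop (the AzureMeta and AzureActions helpers are the classes used throughout these scripts; the explicit import paths below are an assumption, since the real scripts use wildcard imports, and the names are placeholders):

    # Assumed import paths for the DLab helper classes; adjust if the module
    # layout differs.
    from dlab.meta_lib import AzureMeta
    from dlab.actions_lib import AzureActions

    resource_group_name = 'dlab-rg'                                     # placeholder
    shared_storage_account_name = 'dlab-sbn-endpoint1-shared-storage'   # placeholder

    # On rollback, remove the shared storage account by matching its "Name" tag
    for storage_account in AzureMeta().list_storage_accounts(resource_group_name):
        if shared_storage_account_name == storage_account.tags["Name"]:
            AzureActions().remove_storage_account(resource_group_name, storage_account.name)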

