You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@dlab.apache.org by dm...@apache.org on 2019/07/17 07:16:35 UTC
[incubator-dlab] branch feature/projects updated: Spark cluster
configuration failures fixed; EMR configuration failures fixed
This is an automated email from the ASF dual-hosted git repository.
dmysakovets pushed a commit to branch feature/projects
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git
The following commit(s) were added to refs/heads/feature/projects by this push:
new eb1148e Spark cluster configuration failures fixed; EMR configuration failures fixed
new 521fbb1 Merge pull request #163 from bodnarmykola/DLAB-748
eb1148e is described below
commit eb1148eec699aa6f6fda499263b884a82c1b277e
Author: Mykola_Bodnar1 <bo...@gmail.com>
AuthorDate: Wed Jul 17 10:09:50 2019 +0300
Spark cluster configuration failures fixed
EMR configuration failures fixed
---
.../src/general/lib/aws/actions_lib.py | 12 ++++++------
.../scripts/aws/common_notebook_configure_dataengine.py | 1 -
2 files changed, 6 insertions(+), 7 deletions(-)
diff --git a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
index d8e00f3..99767b5 100644
--- a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
@@ -1365,13 +1365,13 @@ def create_image_from_instance(tag_name='', instance_name='', image_name='', tag
def install_emr_spark(args):
s3_client = boto3.client('s3', config=Config(signature_version='s3v4'), region_name=args.region)
- s3_client.download_file(args.bucket, args.user_name + '/' + args.cluster_name + '/spark.tar.gz',
+ s3_client.download_file(args.bucket, args.project_name + '/' + args.cluster_name + '/spark.tar.gz',
'/tmp/spark.tar.gz')
- s3_client.download_file(args.bucket, args.user_name + '/' + args.cluster_name + '/spark-checksum.chk',
+ s3_client.download_file(args.bucket, args.project_name + '/' + args.cluster_name + '/spark-checksum.chk',
'/tmp/spark-checksum.chk')
if 'WARNING' in local('md5sum -c /tmp/spark-checksum.chk', capture=True):
local('rm -f /tmp/spark.tar.gz')
- s3_client.download_file(args.bucket, args.user_name + '/' + args.cluster_name + '/spark.tar.gz',
+ s3_client.download_file(args.bucket, args.project_name + '/' + args.cluster_name + '/spark.tar.gz',
'/tmp/spark.tar.gz')
if 'WARNING' in local('md5sum -c /tmp/spark-checksum.chk', capture=True):
print("The checksum of spark.tar.gz is mismatched. It could be caused by aws network issue.")
@@ -1403,9 +1403,9 @@ def yarn(args, yarn_dir):
else:
s3client = boto3.client('s3', config=Config(signature_version='s3v4'), region_name=args.region)
s3resource = boto3.resource('s3', config=Config(signature_version='s3v4'))
- get_files(s3client, s3resource, args.user_name + '/' + args.cluster_name + '/config/', args.bucket, yarn_dir)
- local('sudo mv ' + yarn_dir + args.user_name + '/' + args.cluster_name + '/config/* ' + yarn_dir)
- local('sudo rm -rf ' + yarn_dir + args.user_name + '/')
+ get_files(s3client, s3resource, args.project_name + '/' + args.cluster_name + '/config/', args.bucket, yarn_dir)
+ local('sudo mv ' + yarn_dir + args.project_name + '/' + args.cluster_name + '/config/* ' + yarn_dir)
+ local('sudo rm -rf ' + yarn_dir + args.project_name + '/')
def get_files(s3client, s3resource, dist, bucket, local):
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
index efedcd8..c5a3573 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
@@ -56,7 +56,6 @@ if __name__ == "__main__":
notebook_config['region'] = os.environ['aws_region']
notebook_config['tag_name'] = notebook_config['service_base_name'] + '-Tag'
notebook_config['project_name'] = os.environ['project_name']
- notebook_config['project_tag'] = os.environ['project_tag']
notebook_config['cluster_name'] = notebook_config['service_base_name'] + '-' + notebook_config['project_name'] + \
'-de-' + notebook_config['exploratory_name'] + '-' + \
notebook_config['computational_name']
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@dlab.apache.org
For additional commands, e-mail: commits-help@dlab.apache.org