You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sh...@apache.org on 2015/06/22 20:43:14 UTC
spark git commit: [SPARK-8429] [EC2] Add ability to set additional
tags
Repository: spark
Updated Branches:
refs/heads/master 0818fdec3 -> 42a1f716f
[SPARK-8429] [EC2] Add ability to set additional tags
Add the `--additional-tags` parameter that allows setting additional tags on all the created instances (masters and slaves).
The user can specify multiple tags by separating them with a comma (`,`), while each tag name and value should be separated by a colon (`:`); for example, `Task:MySparkProject,Env:production` would add two tags, `Task` and `Env`, with the given values.
Author: Stefano Parmesan <s....@gmail.com>
Closes #6857 from armisael/patch-1 and squashes the following commits:
c5ac92c [Stefano Parmesan] python style (pep8)
8e614f1 [Stefano Parmesan] Set multiple tags in a single request
bfc56af [Stefano Parmesan] Address SPARK-7900 by increasing sleep time
daf8615 [Stefano Parmesan] Add ability to set additional tags
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/42a1f716
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/42a1f716
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/42a1f716
Branch: refs/heads/master
Commit: 42a1f716fa35533507784be5e9117a984a03e62d
Parents: 0818fde
Author: Stefano Parmesan <s....@gmail.com>
Authored: Mon Jun 22 11:43:10 2015 -0700
Committer: Shivaram Venkataraman <sh...@cs.berkeley.edu>
Committed: Mon Jun 22 11:43:10 2015 -0700
----------------------------------------------------------------------
ec2/spark_ec2.py | 28 ++++++++++++++++++++--------
1 file changed, 20 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/42a1f716/ec2/spark_ec2.py
----------------------------------------------------------------------
diff --git a/ec2/spark_ec2.py b/ec2/spark_ec2.py
index 5608749..1037356 100755
--- a/ec2/spark_ec2.py
+++ b/ec2/spark_ec2.py
@@ -290,6 +290,10 @@ def parse_args():
"--additional-security-group", type="string", default="",
help="Additional security group to place the machines in")
parser.add_option(
+ "--additional-tags", type="string", default="",
+ help="Additional tags to set on the machines; tags are comma-separated, while name and " +
+ "value are colon separated; ex: \"Task:MySparkProject,Env:production\"")
+ parser.add_option(
"--copy-aws-credentials", action="store_true", default=False,
help="Add AWS credentials to hadoop configuration to allow Spark to access S3")
parser.add_option(
@@ -684,16 +688,24 @@ def launch_cluster(conn, opts, cluster_name):
# This wait time corresponds to SPARK-4983
print("Waiting for AWS to propagate instance metadata...")
- time.sleep(5)
- # Give the instances descriptive names
+ time.sleep(15)
+
+ # Give the instances descriptive names and set additional tags
+ additional_tags = {}
+ if opts.additional_tags.strip():
+ additional_tags = dict(
+ map(str.strip, tag.split(':', 1)) for tag in opts.additional_tags.split(',')
+ )
+
for master in master_nodes:
- master.add_tag(
- key='Name',
- value='{cn}-master-{iid}'.format(cn=cluster_name, iid=master.id))
+ master.add_tags(
+ dict(additional_tags, Name='{cn}-master-{iid}'.format(cn=cluster_name, iid=master.id))
+ )
+
for slave in slave_nodes:
- slave.add_tag(
- key='Name',
- value='{cn}-slave-{iid}'.format(cn=cluster_name, iid=slave.id))
+ slave.add_tags(
+ dict(additional_tags, Name='{cn}-slave-{iid}'.format(cn=cluster_name, iid=slave.id))
+ )
# Return all the instances
return (master_nodes, slave_nodes)
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org