spark-commits mailing list archives

From: pwend...@apache.org
Subject: git commit: SPARK-2246: Add user-data option to EC2 scripts
Date: Sun, 03 Aug 2014 17:28:20 GMT
Repository: spark
Updated Branches:
  refs/heads/branch-1.1 1992175fd -> 162fc9512


SPARK-2246: Add user-data option to EC2 scripts

Author: Allan Douglas R. de Oliveira <allan@chaordicsystems.com>

Closes #1186 from douglaz/spark_ec2_user_data and squashes the following commits:

94a36f9 [Allan Douglas R. de Oliveira] Added user data option to EC2 script
(cherry picked from commit a0bcbc159e89be868ccc96175dbf1439461557e1)

Signed-off-by: Patrick Wendell <pwendell@gmail.com>
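
Usage note (not part of the commit): the new --user-data flag points at a local file whose
contents are passed to EC2 as instance user data. Most AMIs run shebang-style user data once
at first boot via cloud-init, so one possible user-data file is sketched below; the file name,
package, and log path are illustrative assumptions, not anything from the patch.

    #!/usr/bin/env python
    # Hypothetical user-data file, executed once at first boot by cloud-init
    # on AMIs that run shebang-style user data.
    import subprocess

    # Example-only setup step: install an extra OS package on every instance.
    subprocess.check_call(["yum", "install", "-y", "htop"])

    # Record that the boot-time hook ran, for later debugging.
    with open("/var/log/user-data-ran.log", "w") as f:
        f.write("user data executed\n")

Such a file would then be supplied at launch time with --user-data <path>.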


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/162fc951
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/162fc951
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/162fc951

Branch: refs/heads/branch-1.1
Commit: 162fc9512018e0c592b3aaa29d405f511461795a
Parents: 1992175
Author: Allan Douglas R. de Oliveira <allan@chaordicsystems.com>
Authored: Sun Aug 3 10:25:59 2014 -0700
Committer: Patrick Wendell <pwendell@gmail.com>
Committed: Sun Aug 3 10:28:15 2014 -0700

----------------------------------------------------------------------
 ec2/spark_ec2.py | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/162fc951/ec2/spark_ec2.py
----------------------------------------------------------------------
diff --git a/ec2/spark_ec2.py b/ec2/spark_ec2.py
index 02cfe4e..0c2f85a 100755
--- a/ec2/spark_ec2.py
+++ b/ec2/spark_ec2.py
@@ -135,6 +135,10 @@ def parse_args():
         "--master-opts", type="string", default="",
         help="Extra options to give to master through SPARK_MASTER_OPTS variable " +
              "(e.g -Dspark.worker.timeout=180)")
+    parser.add_option(
+        "--user-data", type="string", default="",
+        help="Path to a user-data file (most AMI's interpret this as an initialization script)")
+
 
     (opts, args) = parser.parse_args()
     if len(args) != 2:
@@ -274,6 +278,12 @@ def launch_cluster(conn, opts, cluster_name):
     if opts.key_pair is None:
         print >> stderr, "ERROR: Must provide a key pair name (-k) to use on instances."
         sys.exit(1)
+
+    user_data_content = None
+    if opts.user_data:
+        with open(opts.user_data) as user_data_file:
+            user_data_content = user_data_file.read()
+
     print "Setting up security groups..."
     master_group = get_or_make_group(conn, cluster_name + "-master")
     slave_group = get_or_make_group(conn, cluster_name + "-slaves")
@@ -347,7 +357,8 @@ def launch_cluster(conn, opts, cluster_name):
                 key_name=opts.key_pair,
                 security_groups=[slave_group],
                 instance_type=opts.instance_type,
-                block_device_map=block_map)
+                block_device_map=block_map,
+                user_data=user_data_content)
             my_req_ids += [req.id for req in slave_reqs]
             i += 1
 
@@ -398,7 +409,8 @@ def launch_cluster(conn, opts, cluster_name):
                                       placement=zone,
                                       min_count=num_slaves_this_zone,
                                       max_count=num_slaves_this_zone,
-                                      block_device_map=block_map)
+                                      block_device_map=block_map,
+                                      user_data=user_data_content)
                 slave_nodes += slave_res.instances
                 print "Launched %d slaves in %s, regid = %s" % (num_slaves_this_zone,
                                                                 zone, slave_res.id)
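
For reference, the patch relies on boto's instance-launch calls accepting a user_data keyword,
which is what the two hunks above wire through. The standalone sketch below shows that mechanism
outside spark_ec2.py; it assumes boto 2.x, and the region, AMI id, key pair, instance type, and
user-data file name are placeholders.

    # Minimal sketch of passing user data through boto 2.x, mirroring the patched calls.
    import boto.ec2

    user_data_path = "my-user-data.sh"  # hypothetical local file
    user_data_content = None
    if user_data_path:
        with open(user_data_path) as user_data_file:
            user_data_content = user_data_file.read()

    # Placeholder region; credentials are taken from the usual boto configuration.
    conn = boto.ec2.connect_to_region("us-east-1")
    reservation = conn.run_instances(
        image_id="ami-00000000",        # placeholder AMI
        key_name="my-key-pair",         # placeholder key pair
        instance_type="m1.large",
        user_data=user_data_content)    # contents run at first boot on most AMIs
    print reservation.instances[0].id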

