spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From pwend...@apache.org
Subject git commit: EC2 configurable workers
Date Sat, 03 May 2014 23:52:59 GMT
Repository: spark
Updated Branches:
  refs/heads/master 34719ba32 -> 4669a84ab


EC2 configurable workers

Added an option to configure the number of worker instances and to set SPARK_MASTER_OPTS

Depends on: https://github.com/mesos/spark-ec2/pull/46

Author: Allan Douglas R. de Oliveira <allan@chaordicsystems.com>

Closes #612 from douglaz/ec2_configurable_workers and squashes the following commits:

d6c5d65 [Allan Douglas R. de Oliveira] Added master opts parameter
6c34671 [Allan Douglas R. de Oliveira] Use number of worker instances as string on template
ba528b9 [Allan Douglas R. de Oliveira] Added SPARK_WORKER_INSTANCES parameter


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4669a84a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4669a84a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4669a84a

Branch: refs/heads/master
Commit: 4669a84ab10296e01a5fbbae1de9574b793b7ed5
Parents: 34719ba
Author: Allan Douglas R. de Oliveira <allan@chaordicsystems.com>
Authored: Sat May 3 16:52:19 2014 -0700
Committer: Patrick Wendell <pwendell@gmail.com>
Committed: Sat May 3 16:52:19 2014 -0700

----------------------------------------------------------------------
 ec2/deploy.generic/root/spark-ec2/ec2-variables.sh |  2 ++
 ec2/spark_ec2.py                                   | 12 ++++++++++--
 2 files changed, 12 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/4669a84a/ec2/deploy.generic/root/spark-ec2/ec2-variables.sh
----------------------------------------------------------------------
diff --git a/ec2/deploy.generic/root/spark-ec2/ec2-variables.sh b/ec2/deploy.generic/root/spark-ec2/ec2-variables.sh
index 42e8faa..3570891 100644
--- a/ec2/deploy.generic/root/spark-ec2/ec2-variables.sh
+++ b/ec2/deploy.generic/root/spark-ec2/ec2-variables.sh
@@ -28,3 +28,5 @@ export SPARK_VERSION="{{spark_version}}"
 export SHARK_VERSION="{{shark_version}}"
 export HADOOP_MAJOR_VERSION="{{hadoop_major_version}}"
 export SWAP_MB="{{swap}}"
+export SPARK_WORKER_INSTANCES="{{spark_worker_instances}}"
+export SPARK_MASTER_OPTS="{{spark_master_opts}}"

http://git-wip-us.apache.org/repos/asf/spark/blob/4669a84a/ec2/spark_ec2.py
----------------------------------------------------------------------
diff --git a/ec2/spark_ec2.py b/ec2/spark_ec2.py
index 31209a6..db39374 100755
--- a/ec2/spark_ec2.py
+++ b/ec2/spark_ec2.py
@@ -103,6 +103,12 @@ def parse_args():
       help="When destroying a cluster, delete the security groups that were created")
   parser.add_option("--use-existing-master", action="store_true", default=False,
       help="Launch fresh slaves, but use an existing stopped master if possible")
+  parser.add_option("--worker-instances", type="int", default=1,
+      help="Number of instances per worker: variable SPARK_WORKER_INSTANCES (default: 1)")
+  parser.add_option("--master-opts", type="string", default="",
+      help="Extra options to give to master through SPARK_MASTER_OPTS variable (e.g -Dspark.worker.timeout=180)")
+
+
 
   (opts, args) = parser.parse_args()
   if len(args) != 2:
@@ -223,7 +229,7 @@ def launch_cluster(conn, opts, cluster_name):
     sys.exit(1)
   if opts.key_pair is None:
     print >> stderr, "ERROR: Must provide a key pair name (-k) to use on instances."
-    sys.exit(1)    
+    sys.exit(1)
   print "Setting up security groups..."
   master_group = get_or_make_group(conn, cluster_name + "-master")
   slave_group = get_or_make_group(conn, cluster_name + "-slaves")
@@ -551,7 +557,9 @@ def deploy_files(conn, root_dir, opts, master_nodes, slave_nodes, modules):
     "modules": '\n'.join(modules),
     "spark_version": spark_v,
     "shark_version": shark_v,
-    "hadoop_major_version": opts.hadoop_major_version
+    "hadoop_major_version": opts.hadoop_major_version,
+    "spark_worker_instances": "%d" % opts.worker_instances,
+    "spark_master_opts": opts.master_opts
   }
 
   # Create a temp directory in which we will place all the files to be


Mime
View raw message