
Commit baa30fc

Use new Spark EC2 scripts by default
1 parent fadeb1d commit baa30fc

File tree

ec2/README
ec2/spark_ec2.py

2 files changed: +8 -8 lines

ec2/README

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 This folder contains a script, spark-ec2, for launching Spark clusters on
 Amazon EC2. Usage instructions are available online at:
 
-https://github.com/mesos/spark/wiki/Running-Spark-on-Amazon-EC2
+http://spark-project.org/docs/latest/ec2-scripts.html

ec2/spark_ec2.py

Lines changed: 7 additions & 7 deletions
@@ -35,7 +35,7 @@
 from boto import ec2
 
 # A static URL from which to figure out the latest Mesos EC2 AMI
-LATEST_AMI_URL = "https://s3.amazonaws.com/mesos-images/ids/latest-spark-0.6"
+LATEST_AMI_URL = "https://s3.amazonaws.com/mesos-images/ids/latest-spark-0.7"
 
 
 # Configure and parse our command-line arguments
@@ -83,16 +83,16 @@ def parse_args():
       help="If specified, launch slaves as spot instances with the given " +
            "maximum price (in dollars)")
   parser.add_option("--cluster-type", type="choice", metavar="TYPE",
-      choices=["mesos", "standalone"], default="mesos",
+      choices=["mesos", "standalone"], default="standalone",
       help="'mesos' for a Mesos cluster, 'standalone' for a standalone " +
-           "Spark cluster (default: mesos)")
+           "Spark cluster (default: standalone)")
   parser.add_option("--ganglia", action="store_true", default=True,
       help="Setup Ganglia monitoring on cluster (default: on). NOTE: " +
            "the Ganglia page will be publicly accessible")
   parser.add_option("--no-ganglia", action="store_false", dest="ganglia",
       help="Disable Ganglia monitoring for the cluster")
-  parser.add_option("--new-scripts", action="store_true", default=False,
-      help="Use new spark-ec2 scripts, for Spark >= 0.7 AMIs")
+  parser.add_option("--old-scripts", action="store_true", default=False,
+      help="Use old mesos-ec2 scripts, for Spark <= 0.6 AMIs")
   parser.add_option("-u", "--user", default="root",
       help="The SSH user you want to connect as (default: root)")
   parser.add_option("--delete-groups", action="store_true", default=False,
@@ -383,7 +383,7 @@ def setup_cluster(conn, master_nodes, slave_nodes, zoo_nodes, opts, deploy_ssh_k
   if opts.ganglia:
     modules.append('ganglia')
 
-  if opts.new_scripts:
+  if not opts.old_scripts:
     # NOTE: We should clone the repository before running deploy_files to
     # prevent ec2-variables.sh from being overwritten
     ssh(master, opts, "rm -rf spark-ec2 && git clone https://github.com/mesos/spark-ec2.git")
@@ -393,7 +393,7 @@ def setup_cluster(conn, master_nodes, slave_nodes, zoo_nodes, opts, deploy_ssh_k
               zoo_nodes, modules)
 
   print "Running setup on master..."
-  if not opts.new_scripts:
+  if opts.old_scripts:
     if opts.cluster_type == "mesos":
       setup_mesos_cluster(master, opts)
     elif opts.cluster_type == "standalone":
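As a reading aid, here is a minimal, self-contained sketch of what the flag inversion above means when the script parses its arguments. Only the two parse_args options shown in this diff are reproduced; the usage string, the assertions, and the comments are illustrative assumptions, not part of the commit.

# Sketch of option behaviour after this commit (assumed, illustrative only).
# The two add_option calls are copied from the diff above; nothing else from
# spark_ec2.py is reproduced here.
from optparse import OptionParser

parser = OptionParser(usage="sketch [options]")
parser.add_option("--cluster-type", type="choice", metavar="TYPE",
    choices=["mesos", "standalone"], default="standalone",
    help="'mesos' for a Mesos cluster, 'standalone' for a standalone " +
         "Spark cluster (default: standalone)")
parser.add_option("--old-scripts", action="store_true", default=False,
    help="Use old mesos-ec2 scripts, for Spark <= 0.6 AMIs")

# With no flags, the new defaults apply: a standalone cluster deployed with the
# new spark-ec2 scripts, because setup_cluster now branches on
# `not opts.old_scripts` and takes the "git clone spark-ec2" path.
(opts, args) = parser.parse_args([])
assert opts.cluster_type == "standalone"
assert not opts.old_scripts

# Passing --old-scripts opts back into the pre-0.7 behaviour, where
# setup_mesos_cluster or the standalone setup is chosen by --cluster-type.
(opts, args) = parser.parse_args(["--old-scripts", "--cluster-type", "mesos"])
assert opts.old_scripts and opts.cluster_type == "mesos"

In short: launching with no extra flags now uses the new spark-ec2 scripts and a standalone cluster by default, and --old-scripts replaces --new-scripts as the opt-out for Spark <= 0.6 AMIs.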
