From a07bd286bb899e99f5992b163a74497047fce448 Mon Sep 17 00:00:00 2001
From: Patrick Wendell <pwendell@gmail.com>
Date: Sat, 18 May 2013 16:00:32 -0700
Subject: [PATCH] spark_ec2: clean up Spark AMI version handling (changes in response to Josh's review)

---
 ec2/spark_ec2.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/ec2/spark_ec2.py b/ec2/spark_ec2.py
index d05b767cb8..7df7ae2ae4 100755
--- a/ec2/spark_ec2.py
+++ b/ec2/spark_ec2.py
@@ -186,12 +186,11 @@ def get_spark_ami(opts):
     instance_type = "pvm"
     print >> stderr,\
         "Don't recognize %s, assuming type is pvm" % opts.instance_type
-  if version not in ["latest", "v0.7.0"]:
+
+  version = version.replace("v", "")
+  if version not in ["latest", "0.7.0"]:
     print >> stderr, \
       "Don't know how to resolve AMI for version: %s" % version
-  # TODO(pwendell) Once we have multiple Spark AMI versions, we should let 
-  # people give a version flag here in place of just saying 'latest'.
-  version = version[1:]
   ami_path = "%s/%s/%s/%s" % (AMI_PREFIX, version, opts.region, instance_type)
   try:
     ami = urllib2.urlopen(ami_path).read().strip()
@@ -253,7 +252,7 @@ def launch_cluster(conn, opts, cluster_name):
     sys.exit(1)
 
   # Figure out Spark AMI
-  if opts.ami[0] == "v":
+  if "ami" not in opts.ami:
     opts.ami = get_spark_ami(opts)
   print "Launching instances..."
 
-- 
GitLab
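
For reference, a minimal standalone sketch of the version-to-AMI lookup this hunk implements, assuming the AMI index is laid out as AMI_PREFIX/<version>/<region>/<instance_type>; the URL prefix and the helper name below are illustrative placeholders, and the sketch follows the script's Python 2 idioms (print >> stderr, urllib2):

    import sys
    import urllib2  # Python 2 module, matching spark_ec2.py

    AMI_PREFIX = "http://example.invalid/spark-amis"  # placeholder prefix, not the real index

    def resolve_spark_ami(version, region, instance_type):
        # Accept both "v0.7.0" and "0.7.0" by stripping the "v" before the check.
        version = version.replace("v", "")
        if version not in ["latest", "0.7.0"]:
            print >> sys.stderr, \
                "Don't know how to resolve AMI for version: %s" % version
        ami_path = "%s/%s/%s/%s" % (AMI_PREFIX, version, region, instance_type)
        try:
            # The index stores one AMI id per (version, region, instance type) path.
            return urllib2.urlopen(ami_path).read().strip()
        except IOError:
            print >> sys.stderr, "Could not resolve AMI at: " + ami_path
            sys.exit(1)

Normalizing the "v" up front lets the known-version check and the index path share one form, so "v0.7.0" and "0.7.0" both map to .../0.7.0/<region>/<instance_type>.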