
Commit

Improving EC2 script to work with HVM instances
Danny Bickson committed Oct 31, 2013
1 parent 2d366df commit 20353f6
Showing 1 changed file with 10 additions and 3 deletions.
scripts/ec2/gl_ec2.py
@@ -34,6 +34,7 @@

# A static URL from which to figure out the latest GraphLab EC2 AMI
STD_AMI_URL = "https://s3.amazonaws.com/GraphLabGit/graphlab2-std"
+HVM_AMI_URL = "https://s3.amazonaws.com/GraphLabGit/graphlab2-hvm"

compilation_threads = 4

@@ -209,7 +210,13 @@ def launch_cluster(conn, opts, cluster_name):
      print "GraphLab AMI for Standard Instances: " + opts.ami
    except:
      print >> stderr, "Could not read " + STD_AMI_URL

+  elif opts.ami == "hpc" :
+    try:
+      opts.ami = urllib2.urlopen(HVM_AMI_URL).read().strip()
+      print "GraphLab AMI for HPC Instances: " + opts.ami
+    except:
+      print >> stderr, "Could not read " + HVM_AMI_URL

  print "Launching instances..."
  try:
    image = conn.get_all_images(image_ids=[opts.ami])[0]
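
For context, both hunks above implement the same lookup: the symbolic AMI names "std" and "hpc" are mapped to concrete AMI ids by fetching a one-line file from S3. A minimal sketch of that pattern in the script's own Python 2 style (the helper name resolve_ami is illustrative, not part of gl_ec2.py):

import urllib2
from sys import stderr

STD_AMI_URL = "https://s3.amazonaws.com/GraphLabGit/graphlab2-std"
HVM_AMI_URL = "https://s3.amazonaws.com/GraphLabGit/graphlab2-hvm"

def resolve_ami(ami_opt):
  # Map "std"/"hpc" to the AMI id published on S3; anything else is
  # assumed to already be a concrete AMI id.
  url = {"std": STD_AMI_URL, "hpc": HVM_AMI_URL}.get(ami_opt)
  if url is None:
    return ami_opt
  try:
    return urllib2.urlopen(url).read().strip()
  except urllib2.URLError:
    print >> stderr, "Could not read " + url
    return None
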
@@ -615,7 +622,7 @@ def main():
#hadoop fs -rmr hdfs://\`head -n 1 ~/machines\`/smallnetflix/;
#hadoop fs -copyFromLocal smallnetflix/ /;
#cat ~/machines
-mpiexec.openmpi -hostfile ~/machines -n %d /home/ubuntu/graphlab/release/toolkits/collaborative_filtering/als --matrix /home/ubuntu/graphlab/release/toolkits/collaborative_filtering/smallnetflix --max_iter=5 --ncpus=%d --predictions=out_predictions --minval=1 --maxval=5 --D=100;
+mpiexec.openmpi -hostfile ~/machines -n %d /home/ubuntu/graphlab/release/toolkits/collaborative_filtering/als --matrix /home/ubuntu/graphlab/release/toolkits/collaborative_filtering/smallnetflix/ --max_iter=5 --ncpus=%d --predictions=out_predictions --minval=1 --maxval=5 --D=100;
\"""" % (opts.identity_file, proxy_opt, master, opts.slaves+1,compilation_threads), shell=True)
  elif action == "pagerank_demo":
    (master_nodes, slave_nodes, zoo_nodes) = get_existing_cluster(
@@ -652,7 +659,7 @@ def main():
#hadoop fs -rmr hdfs://\`head -n 1 ~/machines\`/livejournal/;
#hadoop fs -copyFromLocal livejournal/ /;
#cat ~/machines
-mpiexec.openmpi -hostfile ~/machines -n %d /home/ubuntu/graphlab/release/toolkits/collaborative_filtering/svd --matrix /home/ubuntu/graphlab/release/toolkits/collaborative_filtering/livejournal --rows=4847572 --cols=4847571 --nsv=2 --nv=7 --max_iter=3 --tol=1e-2 --binary=true --save_vectors=1 --ncpus=%d --input_file_offset=0 ;
+mpiexec.openmpi -hostfile ~/machines -n %d /home/ubuntu/graphlab/release/toolkits/collaborative_filtering/svd --matrix /home/ubuntu/graphlab/release/toolkits/collaborative_filtering/livejournal --rows=4847572 --cols=4847571 --nsv=2 --nv=7 --max_iter=3 --tol=1e-2 --binary=true --save_vectors=1 --ncpus=%d --input_file_offset=0 --ortho_repeats=1 ;
\"""" % (opts.identity_file, proxy_opt, master, opts.slaves+1, compilation_threads), shell=True)


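Both demo hunks follow the same execution pattern: main() formats a shell snippet with the MPI process count (opts.slaves + 1, one process per node including the master) and the thread count, then runs it on the master node over ssh via subprocess. A simplified sketch of that pattern; run_remote and the exact ssh flags here are illustrative, not copied from the script:

import subprocess

def run_remote(identity_file, master, command):
  # Execute a shell command on the cluster master over ssh (sketch only).
  subprocess.check_call(
    "ssh -i %s -o StrictHostKeyChecking=no ubuntu@%s '%s'"
    % (identity_file, master, command), shell=True)

# The SVD demo above then reduces to something like:
# run_remote(opts.identity_file, master,
#   "mpiexec.openmpi -hostfile ~/machines -n %d ... --ncpus=%d"
#   % (opts.slaves + 1, compilation_threads))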
