Mercurial repository: shellac / guppy_basecaller
comparison: env/lib/python3.7/site-packages/boto/pyami/launch_ami.py @ 2:6af9afd405e9 (draft)
"planemo upload commit 0a63dd5f4d38a1f6944587f52a8cd79874177fc1"
author: shellac
date: Thu, 14 May 2020 14:56:58 -0400
parents: 26e78fe6e8c4
children: (none)
comparing revisions 1:75ca89e9b81c and 2:6af9afd405e9
#!/usr/bin/env python
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import getopt
import sys
import imp
import time
import boto

usage_string = """
SYNOPSIS
    launch_ami.py -a ami_id [-b script_bucket] [-s script_name]
                  [-m module] [-c class_name] [-r]
                  [-g group] [-k key_name] [-n num_instances]
                  [-w] [extra_data]
    Where:
        ami_id - the id of the AMI you wish to launch
        module - The name of the Python module containing the class you
                 want to run when the instance is started.  If you use this
                 option the Python module must already be stored on the
                 instance in a location that is on the Python path.
        script_file - The name of a local Python module that you would like
                      to have copied to S3 and then run on the instance
                      when it is started.  The specified module must be
                      import'able (i.e. in your local Python path).  It
                      will then be copied to the specified bucket in S3
                      (see the -b option).  Once the new instance(s)
                      start up the script will be copied from S3 and then
                      run locally on the instance.
        class_name - The name of the class to be instantiated within the
                     module or script file specified.
        script_bucket - the name of the bucket in which the script will be
                        stored
        group - the name of the security group the instance will run in
        key_name - the name of the keypair to use when launching the AMI
        num_instances - how many instances of the AMI to launch (default 1)
        input_queue_name - Name of SQS to read input messages from
        output_queue_name - Name of SQS to write output messages to
        extra_data - additional name-value pairs that will be passed as
                     userdata to the newly launched instance.  These should
                     be of the form "name=value"
    The -r option reloads the Python module to S3 without launching
    another instance.  This can be useful during debugging to allow
    you to test a new version of your script without shutting down
    your instance and starting up another one.
    The -w option tells the script to run synchronously, meaning to
    wait until the instance is actually up and running.  It then prints
    the IP address and internal and external DNS names before exiting.
"""

def usage():
    print(usage_string)
    sys.exit()

def main():
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'a:b:c:g:hi:k:m:n:o:rs:w',
                                   ['ami', 'bucket', 'class', 'group', 'help',
                                    'inputqueue', 'keypair', 'module',
                                    'numinstances', 'outputqueue',
                                    'reload', 'script_name', 'wait'])
    except getopt.GetoptError:
        usage()
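    # default values for the userdata parameters; overridden by the
    # command-line options parsed below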
    params = {'module_name': None,
              'script_name': None,
              'class_name': None,
              'script_bucket': None,
              'group': 'default',
              'keypair': None,
              'ami': None,
              'num_instances': 1,
              'input_queue_name': None,
              'output_queue_name': None}
    reload = None
    wait = None
    for o, a in opts:
        if o in ('-a', '--ami'):
            params['ami'] = a
        if o in ('-b', '--bucket'):
            params['script_bucket'] = a
        if o in ('-c', '--class'):
            params['class_name'] = a
        if o in ('-g', '--group'):
            params['group'] = a
        if o in ('-h', '--help'):
            usage()
        if o in ('-i', '--inputqueue'):
            params['input_queue_name'] = a
        if o in ('-k', '--keypair'):
            params['keypair'] = a
        if o in ('-m', '--module'):
            params['module_name'] = a
        if o in ('-n', '--numinstances'):
            params['num_instances'] = int(a)
        if o in ('-o', '--outputqueue'):
            params['output_queue_name'] = a
        if o in ('-r', '--reload'):
            reload = True
        if o in ('-s', '--script_name'):
            params['script_name'] = a
        if o in ('-w', '--wait'):
            wait = True

    # check required fields
    required = ['ami']
    for pname in required:
        if not params.get(pname, None):
            print('%s is required' % pname)
            usage()
    if params['script_name']:
        # first copy the desired module file to S3 bucket
        if reload:
            print('Reloading module %s to S3' % params['script_name'])
        else:
            print('Copying module %s to S3' % params['script_name'])
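        # locate the module source on the local Python path and upload it to
        # S3; as the usage notes above describe, the new instance copies it
        # back down from the bucket and runs it at startup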
        l = imp.find_module(params['script_name'])
        c = boto.connect_s3()
        bucket = c.get_bucket(params['script_bucket'])
        key = bucket.new_key(params['script_name'] + '.py')
        key.set_contents_from_file(l[0])
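        # record the MD5 of the uploaded script so it can be passed along
        # in the userdata built below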
        params['script_md5'] = key.md5
    # we have everything we need, now build userdata string
    l = []
    for k, v in params.items():
        if v:
            l.append('%s=%s' % (k, v))
    c = boto.connect_ec2()
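    # pass the current AWS credentials through userdata so code running on
    # the launched instance can authenticate with AWS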
    l.append('aws_access_key_id=%s' % c.aws_access_key_id)
    l.append('aws_secret_access_key=%s' % c.aws_secret_access_key)
    for kv in args:
        l.append(kv)
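    # userdata is sent to the instance as a single pipe-delimited string of
    # name=value pairs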
    s = '|'.join(l)
    if not reload:
        rs = c.get_all_images([params['ami']])
        img = rs[0]
        r = img.run(user_data=s, key_name=params['keypair'],
                    security_groups=[params['group']],
                    max_count=params.get('num_instances', 1))
        print('AMI: %s - %s (Started)' % (params['ami'], img.location))
        print('Reservation %s contains the following instances:' % r.id)
        for i in r.instances:
            print('\t%s' % i.id)
        if wait:
            running = False
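            # poll every 30 seconds until every instance in the reservation
            # reports 'running', then print its addressing details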
            while not running:
                time.sleep(30)
                [i.update() for i in r.instances]
                status = [i.state for i in r.instances]
                print(status)
                if status.count('running') == len(r.instances):
                    running = True
            for i in r.instances:
                print('Instance: %s' % i.ami_launch_index)
                print('Public DNS Name: %s' % i.public_dns_name)
                print('Private DNS Name: %s' % i.private_dns_name)

if __name__ == "__main__":
    main()