Mercurial > repos > shellac > guppy_basecaller
comparison env/lib/python3.7/site-packages/boto/utils.py @ 0:26e78fe6e8c4 draft
"planemo upload commit c699937486c35866861690329de38ec1a5d9f783"
author | shellac |
---|---|
date | Sat, 02 May 2020 07:14:21 -0400 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:26e78fe6e8c4 |
---|---|
1 # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/ | |
2 # Copyright (c) 2010, Eucalyptus Systems, Inc. | |
3 # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. | |
4 # All rights reserved. | |
5 # | |
6 # Permission is hereby granted, free of charge, to any person obtaining a | |
7 # copy of this software and associated documentation files (the | |
8 # "Software"), to deal in the Software without restriction, including | |
9 # without limitation the rights to use, copy, modify, merge, publish, dis- | |
10 # tribute, sublicense, and/or sell copies of the Software, and to permit | |
11 # persons to whom the Software is furnished to do so, subject to the fol- | |
12 # lowing conditions: | |
13 # | |
14 # The above copyright notice and this permission notice shall be included | |
15 # in all copies or substantial portions of the Software. | |
16 # | |
17 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
18 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- | |
19 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT | |
20 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, | |
21 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |
22 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS | |
23 # IN THE SOFTWARE. | |
24 | |
25 # | |
26 # Parts of this code were copied or derived from sample code supplied by AWS. | |
27 # The following notice applies to that code. | |
28 # | |
29 # This software code is made available "AS IS" without warranties of any | |
30 # kind. You may copy, display, modify and redistribute the software | |
31 # code either by itself or as incorporated into your code; provided that | |
32 # you do not remove any proprietary notices. Your use of this software | |
33 # code is at your own risk and you waive any claim against Amazon | |
34 # Digital Services, Inc. or its affiliates with respect to your use of | |
35 # this software code. (c) 2006 Amazon Digital Services, Inc. or its | |
36 # affiliates. | |
37 | |
38 """ | |
39 Some handy utility functions used by several classes. | |
40 """ | |
41 | |
42 import subprocess | |
43 import time | |
44 import logging.handlers | |
45 import boto | |
46 import boto.provider | |
47 import tempfile | |
48 import random | |
49 import smtplib | |
50 import datetime | |
51 import re | |
52 import email.mime.multipart | |
53 import email.mime.base | |
54 import email.mime.text | |
55 import email.utils | |
56 import email.encoders | |
57 import gzip | |
58 import threading | |
59 import locale | |
60 from boto.compat import six, StringIO, urllib, encodebytes | |
61 | |
62 from contextlib import contextmanager | |
63 | |
64 from hashlib import md5, sha512 | |
65 _hashfn = sha512 | |
66 | |
67 from boto.compat import json | |
68 | |
69 try: | |
70 from boto.compat.json import JSONDecodeError | |
71 except ImportError: | |
72 JSONDecodeError = ValueError | |
73 | |
74 # List of Query String Arguments of Interest | |
75 qsa_of_interest = ['acl', 'cors', 'defaultObjectAcl', 'location', 'logging', | |
76 'partNumber', 'policy', 'requestPayment', 'torrent', | |
77 'versioning', 'versionId', 'versions', 'website', | |
78 'uploads', 'uploadId', 'response-content-type', | |
79 'response-content-language', 'response-expires', | |
80 'response-cache-control', 'response-content-disposition', | |
81 'response-content-encoding', 'delete', 'lifecycle', | |
82 'tagging', 'restore', | |
83 # storageClass is a QSA for buckets in Google Cloud Storage. | |
84 # (StorageClass is associated to individual keys in S3, but | |
85 # having it listed here should cause no problems because | |
86 # GET bucket?storageClass is not part of the S3 API.) | |
87 'storageClass', | |
88 # websiteConfig is a QSA for buckets in Google Cloud | |
89 # Storage. | |
90 'websiteConfig', | |
91 # compose is a QSA for objects in Google Cloud Storage. | |
92 'compose', | |
93 # billing is a QSA for buckets in Google Cloud Storage. | |
94 'billing', | |
95 # userProject is a QSA for requests in Google Cloud Storage. | |
96 'userProject', | |
97 # encryptionConfig is a QSA for requests in Google Cloud | |
98 # Storage. | |
99 'encryptionConfig'] | |
100 | |
101 | |
# Regexes used by pythonize_name() to insert '_' at word boundaries when
# converting CamelCase identifiers:
# any character followed by a capitalised word (e.g. 'aB' in 'CamelCase')
_first_cap_regex = re.compile('(.)([A-Z][a-z]+)')
# a lower-case letter followed by a run of digits (e.g. 's200')
_number_cap_regex = re.compile('([a-z])([0-9]+)')
# a lower-case letter or digit followed by an upper-case letter
_end_cap_regex = re.compile('([a-z0-9])([A-Z])')
105 | |
106 | |
def unquote_v(nv):
    """Percent-decode the value half of a (name, value) query-string pair.

    A 1-element pair (a bare QSA with no '=value' part) is returned
    unchanged.
    """
    if len(nv) != 1:
        name, value = nv[0], nv[1]
        return (name, urllib.parse.unquote(value))
    return nv
112 | |
113 | |
def canonical_string(method, path, headers, expires=None,
                     provider=None):
    """
    Generates the aws canonical string for the given parameters
    """
    if not provider:
        provider = boto.provider.get_default()

    # Collect only the headers that participate in signing, keyed by
    # their lower-cased names.
    interesting = {}
    for name in headers:
        lname = name.lower()
        if headers[name] is None:
            continue
        if lname in ('content-md5', 'content-type', 'date') or \
                lname.startswith(provider.header_prefix):
            interesting[lname] = str(headers[name]).strip()

    # content-type and content-md5 always appear, even when empty.
    for required in ('content-type', 'content-md5'):
        interesting.setdefault(required, '')

    # A provider-specific date header blanks out 'date'...
    if provider.date_header in interesting:
        interesting['date'] = ''
    # ...and a query-string expiry trumps both.
    if expires:
        interesting['date'] = str(expires)

    # One line per header, provider headers rendered as 'name:value',
    # the standard trio as bare values, all in sorted-name order.
    parts = [method]
    for lname in sorted(interesting):
        value = interesting[lname]
        if lname.startswith(provider.header_prefix):
            parts.append('%s:%s' % (lname, value))
        else:
            parts.append(value)
    buf = '\n'.join(parts) + '\n'

    # Keep only the path before the first '?', unless the query string
    # contains QSAs of interest -- those are re-appended sorted by name.
    segments = path.split('?')
    buf += segments[0]

    if len(segments) > 1:
        pairs = [pair.split('=', 1) for pair in segments[1].split('&')]
        pairs = [unquote_v(pair) for pair in pairs
                 if pair[0] in qsa_of_interest]
        if len(pairs) > 0:
            pairs.sort(key=lambda pair: pair[0])
            buf += '?'
            buf += '&'.join('='.join(pair) for pair in pairs)

    return buf
170 | |
171 | |
def merge_meta(headers, metadata, provider=None):
    """Return a copy of *headers* with *metadata* entries folded in.

    Metadata keys that are base user-settable S3 fields are copied
    verbatim; everything else is namespaced with the provider's
    metadata prefix.  Neither input dict is mutated.
    """
    if not provider:
        provider = boto.provider.get_default()
    prefix = provider.metadata_prefix
    merged = headers.copy()
    for name in metadata.keys():
        if name.lower() in boto.s3.key.Key.base_user_settable_fields:
            merged[name] = metadata[name]
        else:
            merged[prefix + name] = metadata[name]
    return merged
184 | |
185 | |
def get_aws_metadata(headers, provider=None):
    """Extract provider metadata headers out of *headers*.

    Any header whose lower-cased name starts with the provider's
    metadata prefix is removed from *headers* (mutated in place) and
    returned -- percent-decoded, prefix stripped -- in a new dict.

    :param headers: mapping of header name -> value; modified in place.
    :param provider: optional provider; defaults to the boto default.
    :return: dict of metadata name -> decoded value.
    """
    if not provider:
        provider = boto.provider.get_default()
    metadata_prefix = provider.metadata_prefix
    metadata = {}
    # Iterate over a snapshot of the keys: the original code deleted
    # entries from ``headers`` while iterating ``headers.keys()``,
    # which raises RuntimeError on Python 3.
    for hkey in list(headers.keys()):
        if hkey.lower().startswith(metadata_prefix):
            val = urllib.parse.unquote(headers[hkey])
            if isinstance(val, bytes):
                try:
                    val = val.decode('utf-8')
                except UnicodeDecodeError:
                    # Just leave the value as-is
                    pass
            metadata[hkey[len(metadata_prefix):]] = val
            del headers[hkey]
    return metadata
203 | |
204 | |
def retry_url(url, retry_on_404=True, num_retries=10, timeout=None):
    """
    Retry a url.  This is specifically used for accessing the metadata
    service on an instance.  Since this address should never be proxied
    (for security reasons), we create a ProxyHandler with a NULL
    dictionary to override any proxy settings in the environment.
    """
    for attempt in range(num_retries):
        try:
            # An empty ProxyHandler dict forces a direct connection.
            opener = urllib.request.build_opener(
                urllib.request.ProxyHandler({}))
            request = urllib.request.Request(url)
            response = opener.open(request, timeout=timeout)
            body = response.read()

            # urlopen returns bytes on Python 3; normalise to text.
            if hasattr(body, 'decode') and \
                    not isinstance(body, six.string_types):
                body = body.decode('utf-8')

            return body
        except urllib.error.HTTPError as e:
            if e.getcode() == 404 and not retry_on_404:
                return ''
            # other HTTP errors fall through to the backoff below
        except Exception:
            boto.log.exception('Caught exception reading instance data')
        # Exponential backoff before every attempt except the last.
        if attempt + 1 != num_retries:
            boto.log.debug('Sleeping before retrying')
            time.sleep(min(2 ** attempt,
                           boto.config.get('Boto', 'max_retry_delay', 60)))
    boto.log.error('Unable to read instance data, giving up')
    return ''
238 | |
239 | |
def _get_instance_metadata(url, num_retries, timeout=None):
    # Thin factory wrapper around the lazily-populated metadata dict.
    return LazyLoadMetadata(url, num_retries, timeout=timeout)
242 | |
243 | |
class LazyLoadMetadata(dict):
    """A dict of EC2 instance-metadata entries whose values are fetched
    from the metadata service only on first access.

    On construction just the listing at ``url`` is retrieved; every
    entry starts out mapped to ``None`` and is resolved (and cached)
    by ``__getitem__``.
    """

    def __init__(self, url, num_retries, timeout=None):
        self._url = url
        self._num_retries = num_retries
        # entry name -> resource path for scalar (leaf) entries
        self._leaves = {}
        # names of entries that are themselves sub-listings (end in '/')
        self._dicts = []
        self._timeout = timeout
        data = boto.utils.retry_url(self._url, num_retries=self._num_retries, timeout=self._timeout)
        if data:
            fields = data.split('\n')
            for field in fields:
                if field.endswith('/'):
                    # Directory-style entry; resolved later as a nested
                    # LazyLoadMetadata.
                    key = field[0:-1]
                    self._dicts.append(key)
                else:
                    p = field.find('=')
                    if p > 0:
                        # Lines like '0=my-key-name' (public-keys):
                        # expose the name as the key and fetch the
                        # 'openssh-key' resource under the index.
                        key = field[p + 1:]
                        resource = field[0:p] + '/openssh-key'
                    else:
                        key = resource = field
                    self._leaves[key] = resource
                # Placeholder; replaced by the real value on access.
                self[key] = None

    def _materialize(self):
        # Force every lazy value to be fetched and cached.
        for key in self:
            self[key]

    def __getitem__(self, key):
        if key not in self:
            # allow dict to throw the KeyError
            return super(LazyLoadMetadata, self).__getitem__(key)

        # already loaded
        val = super(LazyLoadMetadata, self).__getitem__(key)
        if val is not None:
            return val

        if key in self._leaves:
            resource = self._leaves[key]
            last_exception = None

            # Retry the fetch: JSON bodies can arrive truncated, in
            # which case json.loads raises and we try again.
            for i in range(0, self._num_retries):
                try:
                    val = boto.utils.retry_url(
                        self._url + urllib.parse.quote(resource,
                                                       safe="/:"),
                        num_retries=self._num_retries,
                        timeout=self._timeout)
                    if val and val[0] == '{':
                        # Body looks like JSON; parse it.
                        val = json.loads(val)
                        break
                    else:
                        # Multi-line plain text becomes a list of lines.
                        p = val.find('\n')
                        if p > 0:
                            val = val.split('\n')
                        break

                except JSONDecodeError as e:
                    # Retryable: the service sometimes returns partial
                    # JSON documents.
                    boto.log.debug(
                        "encountered '%s' exception: %s" % (
                            e.__class__.__name__, e))
                    boto.log.debug(
                        'corrupted JSON data found: %s' % val)
                    last_exception = e

                except Exception as e:
                    # Anything other than bad JSON is not retryable.
                    boto.log.debug("encountered unretryable" +
                                   " '%s' exception, re-raising" % (
                                       e.__class__.__name__))
                    last_exception = e
                    raise

                boto.log.error("Caught exception reading meta data" +
                               " for the '%s' try" % (i + 1))

                if i + 1 != self._num_retries:
                    # Jittered exponential backoff between attempts.
                    next_sleep = min(
                        random.random() * 2 ** i,
                        boto.config.get('Boto', 'max_retry_delay', 60))
                    time.sleep(next_sleep)
            else:
                # for/else: every retry was exhausted without a break.
                boto.log.error('Unable to read meta data, giving up')
                boto.log.error(
                    "encountered '%s' exception: %s" % (
                        last_exception.__class__.__name__,
                        last_exception))
                raise last_exception

            self[key] = val
        elif key in self._dicts:
            # Sub-listing: build a nested lazy dict on demand.
            self[key] = LazyLoadMetadata(self._url + key + '/',
                                         self._num_retries)

        return super(LazyLoadMetadata, self).__getitem__(key)

    def get(self, key, default=None):
        # dict.get would bypass the lazy __getitem__, so re-implement.
        try:
            return self[key]
        except KeyError:
            return default

    def values(self):
        # Materialize first so no None placeholders leak out.
        self._materialize()
        return super(LazyLoadMetadata, self).values()

    def items(self):
        self._materialize()
        return super(LazyLoadMetadata, self).items()

    def __str__(self):
        self._materialize()
        return super(LazyLoadMetadata, self).__str__()

    def __repr__(self):
        self._materialize()
        return super(LazyLoadMetadata, self).__repr__()
360 | |
361 | |
362 def _build_instance_metadata_url(url, version, path): | |
363 """ | |
364 Builds an EC2 metadata URL for fetching information about an instance. | |
365 | |
366 Example: | |
367 | |
368 >>> _build_instance_metadata_url('http://169.254.169.254', 'latest', 'meta-data/') | |
369 http://169.254.169.254/latest/meta-data/ | |
370 | |
371 :type url: string | |
372 :param url: URL to metadata service, e.g. 'http://169.254.169.254' | |
373 | |
374 :type version: string | |
375 :param version: Version of the metadata to get, e.g. 'latest' | |
376 | |
377 :type path: string | |
378 :param path: Path of the metadata to get, e.g. 'meta-data/'. If a trailing | |
379 slash is required it must be passed in with the path. | |
380 | |
381 :return: The full metadata URL | |
382 """ | |
383 return '%s/%s/%s' % (url, version, path) | |
384 | |
385 | |
def get_instance_metadata(version='latest', url='http://169.254.169.254',
                          data='meta-data/', timeout=None, num_retries=5):
    """
    Returns the instance metadata as a nested Python dictionary.
    Simple values (e.g. local_hostname, hostname, etc.) will be
    stored as string values.  Values such as ancestor-ami-ids will
    be stored in the dict as a list of string values.  More complex
    fields such as public-keys and will be stored as nested dicts.

    If the timeout is specified, the connection to the specified url
    will time out after the specified number of seconds.

    Returns None if the metadata service cannot be contacted.
    """
    try:
        metadata_url = _build_instance_metadata_url(url, version, data)
        return _get_instance_metadata(metadata_url,
                                      num_retries=num_retries,
                                      timeout=timeout)
    except urllib.error.URLError:
        boto.log.exception("Exception caught when trying to retrieve "
                           "instance metadata for: %s", data)
        return None
406 | |
407 | |
def get_instance_identity(version='latest', url='http://169.254.169.254',
                          timeout=None, num_retries=5):
    """
    Returns the instance identity as a nested Python dictionary.

    Returns None if the metadata service cannot be contacted.
    """
    iid = {}
    base_url = _build_instance_metadata_url(url, version,
                                            'dynamic/instance-identity/')
    try:
        data = retry_url(base_url, num_retries=num_retries, timeout=timeout)
        fields = data.split('\n')
        for field in fields:
            # Skip blank listing lines instead of fetching them (the
            # original issued a request for '' and only filtered the
            # empty name afterwards).
            if not field:
                continue
            val = retry_url(base_url + '/' + field + '/',
                            num_retries=num_retries, timeout=timeout)
            # retry_url returns '' after exhausting its retries; guard
            # before indexing (the original raised IndexError on an
            # empty body).
            if val and val[0] == '{':
                val = json.loads(val)
            iid[field] = val
        return iid
    except urllib.error.URLError:
        return None
428 | |
429 | |
def get_instance_userdata(version='latest', sep=None,
                          url='http://169.254.169.254', timeout=None,
                          num_retries=5):
    """Fetch the EC2 user-data for this instance.

    :param sep: if given, the raw user-data is split on *sep* and each
        piece parsed as ``name=value``; a dict is returned instead of
        the raw string.
    :return: raw user-data string, a parsed dict when *sep* is given,
        or '' when the service is unreachable / returns 404.
    """
    ud_url = _build_instance_metadata_url(url, version, 'user-data')
    user_data = retry_url(ud_url, retry_on_404=False,
                          num_retries=num_retries, timeout=timeout)
    if user_data and sep:
        parsed = {}
        for nvpair in user_data.split(sep):
            # Split on the first '=' only, so values may themselves
            # contain '=' (the original split on every '=' and kept
            # just the first fragment, and crashed with IndexError on
            # entries that had no '=' at all).
            name, _, value = nvpair.partition('=')
            parsed[name.strip()] = value.strip()
        user_data = parsed
    return user_data
442 | |
# Timestamp formats accepted/produced throughout boto.
ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
ISO8601_MS = '%Y-%m-%dT%H:%M:%S.%fZ'
RFC1123 = '%a, %d %b %Y %H:%M:%S %Z'
# Serialises setlocale() calls below; the C locale state is
# process-global, so concurrent changes would race.
LOCALE_LOCK = threading.Lock()
447 | |
448 | |
@contextmanager
def setlocale(name):
    """
    A context manager to set the locale in a threadsafe manner.
    """
    with LOCALE_LOCK:
        # Remember the current locale so it can be restored on exit.
        previous = locale.setlocale(locale.LC_ALL)
        try:
            yield locale.setlocale(locale.LC_ALL, name)
        finally:
            locale.setlocale(locale.LC_ALL, previous)
461 | |
462 | |
def get_ts(ts=None):
    """Format *ts* (a time.struct_time; defaults to now, UTC) as an
    ISO8601 timestamp string."""
    if ts is None:
        ts = time.gmtime()
    return time.strftime(ISO8601, ts)
467 | |
468 | |
def parse_ts(ts):
    """Parse a timestamp string in ISO8601, ISO8601-with-milliseconds or
    RFC1123 form into a naive datetime, using the C locale so that
    month/day names match regardless of the process locale."""
    with setlocale('C'):
        text = ts.strip()
        for fmt in (ISO8601, ISO8601_MS):
            try:
                return datetime.datetime.strptime(text, fmt)
            except ValueError:
                continue
        # Last resort; a failure here propagates to the caller.
        return datetime.datetime.strptime(text, RFC1123)
482 | |
483 | |
def find_class(module_name, class_name=None):
    """Dynamically import *module_name* and return the named attribute.

    :param module_name: dotted module path, optionally including the
        attribute as the last component.
    :param class_name: optional attribute name, appended to
        *module_name* when given.
    :return: the resolved object, or None when the module or attribute
        cannot be found.
    """
    if class_name:
        module_name = "%s.%s" % (module_name, class_name)
    modules = module_name.split('.')
    c = None

    try:
        for m in modules[1:]:
            if c:
                c = getattr(c, m)
            else:
                c = getattr(__import__(".".join(modules[0:-1])), m)
        return c
    except (ImportError, AttributeError, ValueError):
        # Narrowed from a bare ``except:`` -- only resolution failures
        # mean "not found"; KeyboardInterrupt etc. now propagate.
        return None
499 | |
500 | |
def update_dme(username, password, dme_id, ip_address):
    """
    Update your Dynamic DNS record with DNSMadeEasy.com
    """
    # NOTE(review): credentials travel as query-string parameters, as
    # required by this legacy DNSMadeEasy update endpoint.
    dme_url = ('https://www.dnsmadeeasy.com/servlet/updateip'
               '?username=%s&password=%s&id=%s&ip=%s'
               % (username, password, dme_id, ip_address))
    response = urllib.request.urlopen(dme_url)
    return response.read()
509 | |
510 | |
def fetch_file(uri, file=None, username=None, password=None):
    """
    Fetch a file based on the URI provided.
    If you do not pass in a file pointer a tempfile.NamedTemporaryFile,
    or None if the file could not be retrieved is returned.
    The URI can be either an HTTP url, or "s3://bucket_name/key_name"
    """
    boto.log.info('Fetching %s' % uri)
    if file is None:
        file = tempfile.NamedTemporaryFile()
    try:
        if uri.startswith('s3://'):
            bucket_name, key_name = uri[len('s3://'):].split('/', 1)
            c = boto.connect_s3(aws_access_key_id=username,
                                aws_secret_access_key=password)
            bucket = c.get_bucket(bucket_name)
            key = bucket.get_key(key_name)
            key.get_contents_to_file(file)
        else:
            if username and password:
                # Install a basic-auth opener for the fetch.
                passman = urllib.request.HTTPPasswordMgrWithDefaultRealm()
                passman.add_password(None, uri, username, password)
                authhandler = urllib.request.HTTPBasicAuthHandler(passman)
                opener = urllib.request.build_opener(authhandler)
                urllib.request.install_opener(opener)
            s = urllib.request.urlopen(uri)
            file.write(s.read())
        file.seek(0)
    except Exception:
        # The original body had a stray ``raise`` before these lines,
        # making them unreachable and breaking the documented
        # "return None on failure" contract.
        boto.log.exception('Problem Retrieving file: %s' % uri)
        file = None
    return file
544 | |
545 | |
class ShellCommand(object):
    """Run a shell command via subprocess, capturing its output.

    The command is executed in the constructor.  ``status`` /
    ``exit_code`` hold the return code and ``output`` the combined
    stdout+stderr text (when ``wait`` is true).
    """

    def __init__(self, command, wait=True, fail_fast=False, cwd=None):
        self.exit_code = 0
        self.command = command
        self.log_fp = StringIO()
        self.wait = wait
        self.fail_fast = fail_fast
        self.run(cwd=cwd)

    def run(self, cwd=None):
        """Execute ``self.command``.

        :return: the exit code when ``wait`` is true, else None.
        :raises Exception: if ``fail_fast`` is set and the command
            exits non-zero.
        """
        boto.log.info('running:%s' % self.command)
        # NOTE(review): shell=True with an externally supplied command
        # string is a shell-injection risk if ``command`` is untrusted.
        self.process = subprocess.Popen(self.command, shell=True,
                                        stdin=subprocess.PIPE,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE,
                                        cwd=cwd)
        if self.wait:
            # communicate() blocks until the process exits, replacing
            # the original 1-second poll/sleep busy loop.
            stdout, stderr = self.process.communicate()
            # communicate() returns bytes on Python 3 but log_fp is a
            # StringIO; the original wrote bytes and raised TypeError.
            for stream in (stdout, stderr):
                if isinstance(stream, bytes):
                    stream = stream.decode('utf-8', errors='replace')
                self.log_fp.write(stream)
            boto.log.info(self.log_fp.getvalue())
            self.exit_code = self.process.returncode

            if self.fail_fast and self.exit_code != 0:
                # The original concatenated str + int here, raising
                # TypeError instead of the intended Exception.
                raise Exception("Command %s failed with status %s"
                                % (self.command, self.exit_code))

            return self.exit_code

    def setReadOnly(self, value):
        raise AttributeError

    def getStatus(self):
        return self.exit_code

    status = property(getStatus, setReadOnly, None,
                      'The exit code for the command')

    def getOutput(self):
        return self.log_fp.getvalue()

    output = property(getOutput, setReadOnly, None,
                      'The STDIN and STDERR output of the command')
592 | |
593 | |
class AuthSMTPHandler(logging.handlers.SMTPHandler):
    """
    This class extends the SMTPHandler in the standard Python logging module
    to accept a username and password on the constructor and to then use those
    credentials to authenticate with the SMTP server.  To use this, you could
    add something like this in your boto config file:

    [handler_hand07]
    class=boto.utils.AuthSMTPHandler
    level=WARN
    formatter=form07
    args=('localhost', 'username', 'password', 'from@abc', ['user1@abc', 'user2@xyz'], 'Logger Subject')
    """

    def __init__(self, mailhost, username, password,
                 fromaddr, toaddrs, subject):
        """
        Initialize the handler.

        We have extended the constructor to accept a username/password
        for SMTP authentication.
        """
        super(AuthSMTPHandler, self).__init__(mailhost, fromaddr,
                                              toaddrs, subject)
        self.username = username
        self.password = password

    def emit(self, record):
        """
        Emit a record.

        Format the record and send it to the specified addressees.
        It would be really nice if I could add authorization to this class
        without having to resort to cut and paste inheritance but, no.
        """
        try:
            port = self.mailport
            if not port:
                # Fall back to the standard SMTP port (25).
                port = smtplib.SMTP_PORT
            smtp = smtplib.SMTP(self.mailhost, port)
            # Authenticate before sending -- the whole reason this
            # subclass exists.
            smtp.login(self.username, self.password)
            msg = self.format(record)
            # Build the message envelope by hand around the formatted
            # log record, mirroring the stdlib SMTPHandler.
            msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
                self.fromaddr,
                ','.join(self.toaddrs),
                self.getSubject(record),
                email.utils.formatdate(), msg)
            smtp.sendmail(self.fromaddr, self.toaddrs, msg)
            smtp.quit()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            # NOTE(review): bare except mirrors the stdlib SMTPHandler;
            # delivery failures are routed to the logging error handler
            # rather than raised into the application.
            self.handleError(record)
647 | |
648 | |
class LRUCache(dict):
    """A dictionary-like object that stores only a certain number of items, and
    discards its least recently used item when full.

    >>> cache = LRUCache(3)
    >>> cache['A'] = 0
    >>> cache['B'] = 1
    >>> cache['C'] = 2
    >>> len(cache)
    3

    >>> cache['A']
    0

    Adding new items to the cache does not increase its size. Instead, the least
    recently used item is dropped:

    >>> cache['D'] = 3
    >>> len(cache)
    3
    >>> 'B' in cache
    False

    Iterating over the cache returns the keys, starting with the most recently
    used:

    >>> for key in cache:
    ...     print(key)
    D
    A
    C

    This code is based on the LRUCache class from Genshi which is based on
    `Myghty <http://www.myghty.org>`_'s LRUCache from ``myghtyutils.util``,
    written by Mike Bayer and released under the MIT license (Genshi uses the
    BSD License).
    """

    class _Item(object):
        # Node of the doubly-linked recency list: ``head`` is the most
        # recently used item, ``tail`` the least recently used.
        def __init__(self, key, value):
            self.previous = self.next = None
            self.key = key
            self.value = value

        def __repr__(self):
            return repr(self.value)

    def __init__(self, capacity):
        # key -> _Item; the items also form the recency-ordered list.
        self._dict = dict()
        self.capacity = capacity
        self.head = None
        self.tail = None

    def __contains__(self, key):
        return key in self._dict

    def __iter__(self):
        # Walk the linked list from most- to least-recently used.
        cur = self.head
        while cur:
            yield cur.key
            cur = cur.next

    def __len__(self):
        return len(self._dict)

    def __getitem__(self, key):
        item = self._dict[key]
        # A successful lookup counts as a "use": move to the front.
        self._update_item(item)
        return item.value

    def __setitem__(self, key, value):
        item = self._dict.get(key)
        if item is None:
            item = self._Item(key, value)
            self._dict[key] = item
            self._insert_item(item)
        else:
            # Overwrite in place and refresh recency.
            item.value = value
            self._update_item(item)
            self._manage_size()

    def __repr__(self):
        return repr(self._dict)

    def _insert_item(self, item):
        # Link a brand-new item in at the head (most recently used).
        item.previous = None
        item.next = self.head
        if self.head is not None:
            self.head.previous = item
        else:
            # Empty list: the new item is also the tail.
            self.tail = item
        self.head = item
        self._manage_size()

    def _manage_size(self):
        # Evict from the tail until we are back within capacity.
        while len(self._dict) > self.capacity:
            del self._dict[self.tail.key]
            if self.tail != self.head:
                self.tail = self.tail.previous
                self.tail.next = None
            else:
                self.head = self.tail = None

    def _update_item(self, item):
        # Move an existing item to the head of the recency list.
        if self.head == item:
            return

        # Unlink the item from its current position...
        previous = item.previous
        previous.next = item.next
        if item.next is not None:
            item.next.previous = previous
        else:
            self.tail = previous

        # ...and relink it at the head.
        item.previous = None
        item.next = self.head
        self.head.previous = self.head = item
766 | |
767 | |
class Password(object):
    """
    Password object that stores itself as hashed.
    Hash defaults to SHA512 if available, MD5 otherwise.
    """
    # Class-level default; may be overridden per instance.
    hashfunc = _hashfn

    def __init__(self, str=None, hashfunc=None):
        """
        Load the string from an initial value, this should be the
        raw hashed password.
        """
        self.str = str
        if hashfunc:
            self.hashfunc = hashfunc

    def set(self, value):
        """Hash *value* (str or bytes) and store the hex digest."""
        raw = value if isinstance(value, bytes) else value.encode('utf-8')
        self.str = self.hashfunc(raw).hexdigest()

    def __str__(self):
        return str(self.str)

    def __eq__(self, other):
        """Compare by hashing *other* and checking digest equality."""
        if other is None:
            return False
        candidate = other if isinstance(other, bytes) \
            else other.encode('utf-8')
        return str(self.hashfunc(candidate).hexdigest()) == str(self.str)

    def __len__(self):
        return len(self.str) if self.str else 0
804 | |
805 | |
def notify(subject, body=None, html_body=None, to_string=None,
           attachments=None, append_instance_id=True):
    """Send a notification email using the [Notification] boto config.

    :param subject: subject line; the configured instance id is
        prepended unless *append_instance_id* is false.
    :param body: optional plain-text body.
    :param html_body: optional HTML body, attached base64-encoded.
    :param to_string: recipient override; defaults to the configured
        ``smtp_to``.  When neither is set the call is a no-op.
    :param attachments: optional list of pre-built MIME parts.

    Delivery failures are logged, never raised.
    """
    attachments = attachments or []
    if append_instance_id:
        subject = "[%s] %s" % (
            boto.config.get_value("Instance", "instance-id"), subject)
    if not to_string:
        to_string = boto.config.get_value('Notification', 'smtp_to', None)
    if to_string:
        try:
            from_string = boto.config.get_value('Notification',
                                                'smtp_from', 'boto')
            msg = email.mime.multipart.MIMEMultipart()
            msg['From'] = from_string
            msg['Reply-To'] = from_string
            msg['To'] = to_string
            msg['Date'] = email.utils.formatdate(localtime=True)
            msg['Subject'] = subject

            if body:
                msg.attach(email.mime.text.MIMEText(body))

            if html_body:
                part = email.mime.base.MIMEBase('text', 'html')
                part.set_payload(html_body)
                email.encoders.encode_base64(part)
                msg.attach(part)

            for part in attachments:
                msg.attach(part)

            smtp_host = boto.config.get_value('Notification',
                                              'smtp_host', 'localhost')

            # Alternate port support (read the config value once; the
            # original queried it twice).
            smtp_port = boto.config.get_value("Notification", "smtp_port")
            if smtp_port:
                server = smtplib.SMTP(smtp_host, int(smtp_port))
            else:
                server = smtplib.SMTP(smtp_host)

            # TLS support
            if boto.config.getbool("Notification", "smtp_tls"):
                server.ehlo()
                server.starttls()
                server.ehlo()
            smtp_user = boto.config.get_value('Notification', 'smtp_user', '')
            smtp_pass = boto.config.get_value('Notification', 'smtp_pass', '')
            if smtp_user:
                server.login(smtp_user, smtp_pass)
            server.sendmail(from_string, to_string, msg.as_string())
            server.quit()
        except Exception:
            # Narrowed from a bare ``except:``, which also swallowed
            # KeyboardInterrupt / SystemExit.
            boto.log.exception('notify failed')
860 | |
861 | |
def get_utf8_value(value):
    """Return *value* as UTF-8 encoded bytes.

    Bytes pass straight through on Python 3; anything that is not a
    string is stringified first, then text is encoded.
    """
    if isinstance(value, bytes) and not six.PY2:
        return value
    if not isinstance(value, six.string_types):
        value = six.text_type(value)
    if isinstance(value, six.text_type):
        value = value.encode('utf-8')
    return value
873 | |
874 | |
def mklist(value):
    """Coerce *value* to a list: lists are returned as-is, tuples are
    converted, and anything else is wrapped in a one-element list."""
    if isinstance(value, list):
        return value
    if isinstance(value, tuple):
        return list(value)
    return [value]
882 | |
883 | |
def pythonize_name(name):
    """Convert camel case to a "pythonic" name.

    Examples::

        pythonize_name('CamelCase') -> 'camel_case'
        pythonize_name('already_pythonized') -> 'already_pythonized'
        pythonize_name('HTTPRequest') -> 'http_request'
        pythonize_name('HTTPStatus200Ok') -> 'http_status_200_ok'
        pythonize_name('UPPER') -> 'upper'
        pythonize_name('') -> ''

    """
    # Insert '_' at the three kinds of word boundary in sequence, then
    # lower-case the result.
    with_words = _first_cap_regex.sub(r'\1_\2', name)
    with_numbers = _number_cap_regex.sub(r'\1_\2', with_words)
    return _end_cap_regex.sub(r'\1_\2', with_numbers).lower()
900 | |
901 | |
def write_mime_multipart(content, compress=False, deftype='text/plain', delimiter=':'):
    """Build a MIME multipart document from named parts.

    :param content: A list of tuples of name-content pairs. This is used
                    instead of a dict to ensure that scripts run in order
    :type list of tuples:

    :param compress: Use gzip to compress the scripts, defaults to no compression
    :type bool:

    :param deftype: The type that should be assumed if nothing else can be figured out
    :type str:

    :param delimiter: mime delimiter (unused; kept for backward compatibility)
    :type str:

    :return: Final mime multipart
    :rtype: str (bytes when *compress* is true)
    """
    wrapper = email.mime.multipart.MIMEMultipart()
    for name, con in content:
        definite_type = guess_mime_type(con, deftype)
        maintype, subtype = definite_type.split('/', 1)
        if maintype == 'text':
            mime_con = email.mime.text.MIMEText(con, _subtype=subtype)
        else:
            mime_con = email.mime.base.MIMEBase(maintype, subtype)
            mime_con.set_payload(con)
            # Encode the payload using Base64
            email.encoders.encode_base64(mime_con)
        mime_con.add_header('Content-Disposition', 'attachment', filename=name)
        wrapper.attach(mime_con)
    rcontent = wrapper.as_string()

    if compress:
        # gzip requires a binary buffer and bytes input; the original
        # used StringIO and wrote str, which raises TypeError on
        # Python 3.
        import io
        buf = io.BytesIO()
        gz = gzip.GzipFile(mode='wb', fileobj=buf)
        try:
            gz.write(rcontent.encode('utf-8'))
        finally:
            gz.close()
        rcontent = buf.getvalue()

    return rcontent
945 | |
946 | |
def guess_mime_type(content, deftype):
    """Guess the MIME type of a block of text from its leading characters.

    :param content: the text whose type is being guessed
    :type content: str

    :param deftype: MIME type to fall back on when no marker matches
    :type deftype: str

    :rtype: str
    :return: the guessed MIME type, or ``deftype`` when nothing matched
    """
    # Leading markers recognized by cloudinit.
    prefix_to_mimetype = (
        ('#include', 'text/x-include-url'),
        ('#!', 'text/x-shellscript'),
        ('#cloud-config', 'text/cloud-config'),
        ('#upstart-job', 'text/upstart-job'),
        ('#part-handler', 'text/part-handler'),
        ('#cloud-boothook', 'text/cloud-boothook'),
    )
    for marker, mimetype in prefix_to_mimetype:
        if content.startswith(marker):
            return mimetype
    return deftype
973 | |
974 | |
def compute_md5(fp, buf_size=8192, size=None):
    """
    Compute the MD5 hash of an open file and return the results as a tuple.

    :type fp: file
    :param fp: File pointer to the file to hash. The pointer is restored
        to its current position before the method returns.

    :type buf_size: integer
    :param buf_size: Number of bytes requested per read.

    :type size: int
    :param size: (optional) Maximum number of bytes to read from ``fp``.
        Useful when uploading a file in multiple parts where the file is
        being split in place; fewer bytes may be available.

    :rtype: tuple
    :return: (hex MD5 digest, base64-encoded MD5 digest, data size in bytes)
    """
    # Delegate to the generic hashing helper with MD5 as the algorithm.
    return compute_hash(fp, buf_size=buf_size, size=size, hash_algorithm=md5)
1001 | |
1002 | |
def compute_hash(fp, buf_size=8192, size=None, hash_algorithm=md5):
    """Hash the contents of an open file and return digest information.

    :type fp: file
    :param fp: File pointer to hash. Reading starts at the current
        position and the pointer is seeked back there before returning.

    :type buf_size: integer
    :param buf_size: Number of bytes requested per read.

    :type size: int
    :param size: (optional) Maximum number of bytes to read from ``fp``.
        NOTE(review): a falsy size (0) behaves like None, i.e. the whole
        stream is hashed — preserved from the historical implementation.

    :type hash_algorithm: callable
    :param hash_algorithm: Constructor for the hash object (default md5).

    :rtype: tuple
    :return: (hex digest, base64-encoded digest, bytes-read count)
    """
    digest = hash_algorithm()
    start_pos = fp.tell()
    remaining = size

    def _read_amount():
        # Read the smaller of the remaining budget and buf_size; a falsy
        # budget means "no limit".
        if remaining and remaining < buf_size:
            return remaining
        return buf_size

    chunk = fp.read(_read_amount())
    while chunk:
        if not isinstance(chunk, bytes):
            chunk = chunk.encode('utf-8')
        digest.update(chunk)
        if remaining:
            remaining -= len(chunk)
            if remaining <= 0:
                break
        chunk = fp.read(_read_amount())
    hex_digest = digest.hexdigest()
    base64_digest = encodebytes(digest.digest()).decode('utf-8')
    # encodebytes appends a trailing newline; drop it.
    if base64_digest.endswith('\n'):
        base64_digest = base64_digest[:-1]
    # data_size is based on bytes actually consumed from fp.
    data_size = fp.tell() - start_pos
    fp.seek(start_pos)
    return (hex_digest, base64_digest, data_size)
1030 | |
1031 | |
def find_matching_headers(name, headers):
    """
    Return the header names in ``headers`` that match ``name``,
    compared case-insensitively.

    :param name: header name to search for
    :param headers: dict of headers, {"name": "value"}
    :rtype: list
    """
    wanted = name.lower()
    return [header for header in headers if header.lower() == wanted]
1039 | |
1040 | |
def merge_headers_by_name(name, headers):
    """
    Join all values in ``headers`` whose key matches ``name``
    (case-insensitively) into one comma-separated string, skipping
    None values.

    :param name: header name to search for
    :param headers: dict of headers, {"name": "value"}
    :rtype: str
    """
    wanted = name.lower()
    values = [str(value) for key, value in headers.items()
              if key.lower() == wanted and value is not None]
    return ','.join(values)
1051 | |
1052 | |
class RequestHook(object):
    """
    Base class for request/response hooks.

    This can be extended and supplied to the connection object
    to gain access to request and response object after the request completes.
    One use for this would be to implement some specific request logging.
    """
    def handle_request_data(self, request, response, error=False):
        # No-op by default; subclasses override this to inspect or log the
        # completed request/response pair. ``error`` is presumably True when
        # the request failed — confirm against the connection object's caller.
        pass
1061 | |
1062 | |
def host_is_ipv6(hostname):
    """
    Naive detection of whether ``hostname`` is an IPv6 host.
    Returns a boolean.
    """
    # Empty strings or anything that is not a string can never be an
    # IPv6 address.
    if not hostname or not isinstance(hostname, str):
        return False

    # A bracketed host ('[::1]' or '[::1]:8080') is IPv6 by definition,
    # and more than one ':' also implies IPv6 since 'host:port' only has
    # one. This is deliberately naive: typos and ill-formed addresses are
    # left for the rest of the connection chain to reject.
    return hostname.startswith('[') or hostname.count(':') >= 2
1084 | |
1085 | |
def parse_host(hostname):
    """
    Strip any port (and IPv6 brackets) from ``hostname``, returning only
    the host portion, including for IPv6 literals that use brackets.
    """
    # Surrounding whitespace is never part of the host.
    cleaned = hostname.strip()

    if not host_is_ipv6(cleaned):
        return cleaned.split(':', 1)[0]
    # '[::1]:8080' -> '[::1' after the split; then the brackets go too.
    return cleaned.split(']:', 1)[0].strip('[]')