springsuite (Mercurial repository of guerler)
planemo/lib/python3.7/site-packages/boto/sdb/db/manager/sdbmanager.py @ 0:d30785e31577 (draft)

commit:   "planemo upload commit 6eee67778febed82ddd413c3ca40b3183a3898f1"
author:   guerler
date:     Fri, 31 Jul 2020 00:18:57 -0400
parents:  (none)
children: (none)

# Copyright (c) 2006,2007,2008 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010 Chris Moyer http://coredumped.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import boto
import re
from boto.utils import find_class
import uuid
from boto.sdb.db.key import Key
from boto.sdb.db.blob import Blob
from boto.sdb.db.property import ListProperty, MapProperty
from datetime import datetime, date, time
from boto.exception import SDBPersistenceError, S3ResponseError
from boto.compat import map, six, long_type

ISO8601 = '%Y-%m-%dT%H:%M:%SZ'


class TimeDecodeError(Exception):
    pass


class SDBConverter(object):
41 """ | |
42 Responsible for converting base Python types to format compatible | |
43 with underlying database. For SimpleDB, that means everything | |
44 needs to be converted to a string when stored in SimpleDB and from | |
45 a string when retrieved. | |
46 | |
47 To convert a value, pass it to the encode or decode method. The | |
48 encode method will take a Python native value and convert to DB | |
49 format. The decode method will take a DB format value and convert | |
50 it to Python native format. To find the appropriate method to | |
51 call, the generic encode/decode methods will look for the | |
52 type-specific method by searching for a method | |
53 called"encode_<type name>" or "decode_<type name>". | |
54 """ | |
    def __init__(self, manager):
        # Do a delayed import to prevent possible circular import errors.
        from boto.sdb.db.model import Model
        self.model_class = Model
        self.manager = manager
        self.type_map = {bool: (self.encode_bool, self.decode_bool),
                         int: (self.encode_int, self.decode_int),
                         float: (self.encode_float, self.decode_float),
                         self.model_class: (
                             self.encode_reference, self.decode_reference
                         ),
                         Key: (self.encode_reference, self.decode_reference),
                         datetime: (self.encode_datetime, self.decode_datetime),
                         date: (self.encode_date, self.decode_date),
                         time: (self.encode_time, self.decode_time),
                         Blob: (self.encode_blob, self.decode_blob),
                         str: (self.encode_string, self.decode_string),
                         }
        if six.PY2:
            self.type_map[long] = (self.encode_long, self.decode_long)

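    # A rough usage sketch of the generic dispatch below (illustrative only,
    # not executed here; 'converter' stands in for an SDBConverter instance):
    #
    #   converter.encode(bool, True)     # -> 'true'
    #   converter.decode(bool, 'true')   # -> True
    #
    # Types that are not in type_map are passed through unchanged; the
    # per-type helpers further down define the exact string formats.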
    def encode(self, item_type, value):
        try:
            if self.model_class in item_type.mro():
                item_type = self.model_class
        except:
            pass
        if item_type in self.type_map:
            encode = self.type_map[item_type][0]
            return encode(value)
        return value

    def decode(self, item_type, value):
        if item_type in self.type_map:
            decode = self.type_map[item_type][1]
            return decode(value)
        return value

    def encode_list(self, prop, value):
        if value in (None, []):
            return []
        if not isinstance(value, list):
            # This is a little trick to avoid encoding when it's just a single value,
            # since that most likely means it's from a query
            item_type = getattr(prop, "item_type")
            return self.encode(item_type, value)
        # Just enumerate(value) won't work here because
        # we need to add in some zero padding
        # We support lists up to 1,000 attributes, since
        # SDB technically only supports 1024 attributes anyway
        values = {}
        for k, v in enumerate(value):
            values["%03d" % k] = v
        return self.encode_map(prop, values)

    def encode_map(self, prop, value):
        import urllib
        if value is None:
            return None
        if not isinstance(value, dict):
            raise ValueError('Expected a dict value, got %s' % type(value))
        new_value = []
        for key in value:
            item_type = getattr(prop, "item_type")
            if self.model_class in item_type.mro():
                item_type = self.model_class
            encoded_value = self.encode(item_type, value[key])
            if encoded_value is not None:
                new_value.append('%s:%s' % (urllib.quote(key), encoded_value))
        return new_value

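    # Illustrative sketch of encode_list/encode_map above (not executed here):
    # with a hypothetical ListProperty whose item_type is str, a list value is
    # first turned into a zero-padded index map and then stored as "key:value"
    # strings, roughly
    #
    #   ['a', 'b']  ->  {'000': 'a', '001': 'b'}  ->  ['000:a', '001:b']
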
    def encode_prop(self, prop, value):
        if isinstance(prop, ListProperty):
            return self.encode_list(prop, value)
        elif isinstance(prop, MapProperty):
            return self.encode_map(prop, value)
        else:
            return self.encode(prop.data_type, value)

    def decode_list(self, prop, value):
        if not isinstance(value, list):
            value = [value]
        if hasattr(prop, 'item_type'):
            item_type = getattr(prop, "item_type")
            dec_val = {}
            for val in value:
                if val is not None:
                    k, v = self.decode_map_element(item_type, val)
                    try:
                        k = int(k)
                    except:
                        k = v
                    dec_val[k] = v
            value = dec_val.values()
        return value

    def decode_map(self, prop, value):
        if not isinstance(value, list):
            value = [value]
        ret_value = {}
        item_type = getattr(prop, "item_type")
        for val in value:
            k, v = self.decode_map_element(item_type, val)
            ret_value[k] = v
        return ret_value

    def decode_map_element(self, item_type, value):
        """Decode a single element for a map"""
        import urllib
        key = value
        if ":" in value:
            key, value = value.split(':', 1)
            key = urllib.unquote(key)
        if self.model_class in item_type.mro():
            value = item_type(id=value)
        else:
            value = self.decode(item_type, value)
        return (key, value)

    def decode_prop(self, prop, value):
        if isinstance(prop, ListProperty):
            return self.decode_list(prop, value)
        elif isinstance(prop, MapProperty):
            return self.decode_map(prop, value)
        else:
            return self.decode(prop.data_type, value)

    def encode_int(self, value):
        value = int(value)
        value += 2147483648
        return '%010d' % value

    def decode_int(self, value):
        try:
            value = int(value)
        except:
            boto.log.error("Error, %s is not an integer" % value)
            value = 0
        value = int(value)
        value -= 2147483648
        return int(value)

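    # Illustrative note on the integer codec above: values are shifted by
    # 2147483648 and zero-padded to ten digits so that the stored strings sort
    # in the same order as the original integers, e.g.
    #
    #   encode_int(-1)            -> '2147483647'
    #   encode_int(0)             -> '2147483648'
    #   encode_int(42)            -> '2147483690'
    #   decode_int('2147483690')  -> 42
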
    def encode_long(self, value):
        value = long_type(value)
        value += 9223372036854775808
        return '%020d' % value

    def decode_long(self, value):
        value = long_type(value)
        value -= 9223372036854775808
        return value

    def encode_bool(self, value):
        if value == True or str(value).lower() in ("true", "yes"):
            return 'true'
        else:
            return 'false'

    def decode_bool(self, value):
        if value.lower() == 'true':
            return True
        else:
            return False

    def encode_float(self, value):
        """
        See http://tools.ietf.org/html/draft-wood-ldapext-float-00.
        """
        s = '%e' % value
        l = s.split('e')
        mantissa = l[0].ljust(18, '0')
        exponent = l[1]
        if value == 0.0:
            case = '3'
            exponent = '000'
        elif mantissa[0] != '-' and exponent[0] == '+':
            case = '5'
            exponent = exponent[1:].rjust(3, '0')
        elif mantissa[0] != '-' and exponent[0] == '-':
            case = '4'
            exponent = 999 + int(exponent)
            exponent = '%03d' % exponent
        elif mantissa[0] == '-' and exponent[0] == '-':
            case = '2'
            mantissa = '%f' % (10 + float(mantissa))
            mantissa = mantissa.ljust(18, '0')
            exponent = exponent[1:].rjust(3, '0')
        else:
            case = '1'
            mantissa = '%f' % (10 + float(mantissa))
            mantissa = mantissa.ljust(18, '0')
            exponent = 999 - int(exponent)
            exponent = '%03d' % exponent
        return '%s %s %s' % (case, exponent, mantissa)

    def decode_float(self, value):
        case = value[0]
        exponent = value[2:5]
        mantissa = value[6:]
        if case == '3':
            return 0.0
        elif case == '5':
            pass
        elif case == '4':
            exponent = '%03d' % (int(exponent) - 999)
        elif case == '2':
            mantissa = '%f' % (float(mantissa) - 10)
            exponent = '-' + exponent
        else:
            mantissa = '%f' % (float(mantissa) - 10)
            exponent = '%03d' % abs((int(exponent) - 999))
        return float(mantissa + 'e' + exponent)

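    # Illustrative note on the float codec above: following the draft cited in
    # encode_float, values are stored as "<case> <exponent> <mantissa>" so that
    # plain string comparison matches numeric order (cases '1'/'2' cover
    # negative values, '3' zero, '4'/'5' positive values). For example,
    #
    #   encode_float(1.0)                         -> '5 000 1.0000000000000000'
    #   decode_float('5 000 1.0000000000000000')  -> 1.0
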
    def encode_datetime(self, value):
        if isinstance(value, six.string_types):
            return value
        if isinstance(value, datetime):
            return value.strftime(ISO8601)
        else:
            return value.isoformat()

    def decode_datetime(self, value):
        """Handles both Dates and DateTime objects"""
        if value is None:
            return value
        try:
            if "T" in value:
                if "." in value:
                    # Handle true "isoformat()" dates, which may have microseconds at the end
                    return datetime.strptime(value.split(".")[0], "%Y-%m-%dT%H:%M:%S")
                else:
                    return datetime.strptime(value, ISO8601)
            else:
                value = value.split("-")
                return date(int(value[0]), int(value[1]), int(value[2]))
        except Exception:
            return None

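    # Illustrative note on the datetime codec above, e.g.
    #
    #   encode_datetime(datetime(2020, 7, 31, 0, 18, 57))  -> '2020-07-31T00:18:57Z'
    #   decode_datetime('2020-07-31T00:18:57Z')            -> datetime(2020, 7, 31, 0, 18, 57)
    #   decode_datetime('2020-07-31')                      -> date(2020, 7, 31)
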
    def encode_date(self, value):
        if isinstance(value, six.string_types):
            return value
        return value.isoformat()

    def decode_date(self, value):
        try:
            value = value.split("-")
            return date(int(value[0]), int(value[1]), int(value[2]))
        except:
            return None

    encode_time = encode_date

    def decode_time(self, value):
        """ converts strings in the form of HH:MM:SS.mmmmmm
            (created by datetime.time.isoformat()) to
            datetime.time objects.

            Timezone-aware strings ("HH:MM:SS.mmmmmm+HH:MM") won't
            be handled right now and will raise TimeDecodeError.
        """
        if '-' in value or '+' in value:
            # TODO: Handle tzinfo
            raise TimeDecodeError("Can't handle timezone aware objects: %r" % value)
        tmp = value.split('.')
        arg = map(int, tmp[0].split(':'))
        if len(tmp) == 2:
            arg.append(int(tmp[1]))
        return time(*arg)

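    # Illustrative note on decode_time above, e.g.
    #
    #   decode_time('00:18:57')        -> time(0, 18, 57)
    #   decode_time('00:18:57+02:00')  -> raises TimeDecodeError (timezone-aware input)
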
    def encode_reference(self, value):
        if value in (None, 'None', '', ' '):
            return None
        if isinstance(value, six.string_types):
            return value
        else:
            return value.id

    def decode_reference(self, value):
        if not value or value == "None":
            return None
        return value

    def encode_blob(self, value):
        if not value:
            return None
        if isinstance(value, six.string_types):
            return value

        if not value.id:
            bucket = self.manager.get_blob_bucket()
            key = bucket.new_key(str(uuid.uuid4()))
            value.id = "s3://%s/%s" % (key.bucket.name, key.name)
        else:
            match = re.match("^s3:\/\/([^\/]*)\/(.*)$", value.id)
            if match:
                s3 = self.manager.get_s3_connection()
                bucket = s3.get_bucket(match.group(1), validate=False)
                key = bucket.get_key(match.group(2))
            else:
                raise SDBPersistenceError("Invalid Blob ID: %s" % value.id)

        if value.value is not None:
            key.set_contents_from_string(value.value)
        return value.id

    def decode_blob(self, value):
        if not value:
            return None
        match = re.match("^s3:\/\/([^\/]*)\/(.*)$", value)
        if match:
            s3 = self.manager.get_s3_connection()
            bucket = s3.get_bucket(match.group(1), validate=False)
            try:
                key = bucket.get_key(match.group(2))
            except S3ResponseError as e:
                if e.reason != "Forbidden":
                    raise
                return None
        else:
            return None
        if key:
            return Blob(file=key, id="s3://%s/%s" % (key.bucket.name, key.name))
        else:
            return None

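    # Illustrative note on the blob codec above: Blob payloads are not stored
    # in SimpleDB itself. encode_blob writes the payload to S3 under an id of
    # the form "s3://<bucket>/<uuid>" and stores only that id; decode_blob
    # parses the id back into bucket and key and returns a Blob wrapping the
    # S3 key.
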
    def encode_string(self, value):
        """Convert ASCII, Latin-1 or UTF-8 to pure Unicode"""
        if not isinstance(value, str):
            return value
        try:
            return six.text_type(value, 'utf-8')
        except:
            # really, this should throw an exception.
            # in the interest of not breaking current
            # systems, however:
            arr = []
            for ch in value:
                arr.append(six.unichr(ord(ch)))
            return u"".join(arr)

    def decode_string(self, value):
        """Decoding a string is really nothing, just
        return the value as-is"""
        return value


class SDBManager(object):

    def __init__(self, cls, db_name, db_user, db_passwd,
                 db_host, db_port, db_table, ddl_dir, enable_ssl,
                 consistent=None):
        self.cls = cls
        self.db_name = db_name
        self.db_user = db_user
        self.db_passwd = db_passwd
        self.db_host = db_host
        self.db_port = db_port
        self.db_table = db_table
        self.ddl_dir = ddl_dir
        self.enable_ssl = enable_ssl
        self.s3 = None
        self.bucket = None
        self.converter = SDBConverter(self)
        self._sdb = None
        self._domain = None
        if consistent is None and hasattr(cls, "__consistent__"):
            consistent = cls.__consistent__
        self.consistent = consistent

    @property
    def sdb(self):
        if self._sdb is None:
            self._connect()
        return self._sdb

    @property
    def domain(self):
        if self._domain is None:
            self._connect()
        return self._domain

    def _connect(self):
        args = dict(aws_access_key_id=self.db_user,
                    aws_secret_access_key=self.db_passwd,
                    is_secure=self.enable_ssl)
        try:
            region = [x for x in boto.sdb.regions() if x.endpoint == self.db_host][0]
            args['region'] = region
        except IndexError:
            pass
        self._sdb = boto.connect_sdb(**args)
        # This assumes that the domain has already been created
        # It's much more efficient to do it this way rather than
        # having this make a roundtrip each time to validate.
        # The downside is that if the domain doesn't exist, it breaks
        self._domain = self._sdb.lookup(self.db_name, validate=False)
        if not self._domain:
            self._domain = self._sdb.create_domain(self.db_name)

    def _object_lister(self, cls, query_lister):
        for item in query_lister:
            obj = self.get_object(cls, item.name, item)
            if obj:
                yield obj

    def encode_value(self, prop, value):
        if value is None:
            return None
        if not prop:
            return str(value)
        return self.converter.encode_prop(prop, value)

    def decode_value(self, prop, value):
        return self.converter.decode_prop(prop, value)

    def get_s3_connection(self):
        if not self.s3:
            self.s3 = boto.connect_s3(self.db_user, self.db_passwd)
        return self.s3

    def get_blob_bucket(self, bucket_name=None):
        s3 = self.get_s3_connection()
        bucket_name = "%s-%s" % (s3.aws_access_key_id, self.domain.name)
        bucket_name = bucket_name.lower()
        try:
            self.bucket = s3.get_bucket(bucket_name)
        except:
            self.bucket = s3.create_bucket(bucket_name)
        return self.bucket

    def load_object(self, obj):
        if not obj._loaded:
            a = self.domain.get_attributes(obj.id, consistent_read=self.consistent)
            if '__type__' in a:
                for prop in obj.properties(hidden=False):
                    if prop.name in a:
                        value = self.decode_value(prop, a[prop.name])
                        value = prop.make_value_from_datastore(value)
                        try:
                            setattr(obj, prop.name, value)
                        except Exception as e:
                            boto.log.exception(e)
            obj._loaded = True

    def get_object(self, cls, id, a=None):
        obj = None
        if not a:
            a = self.domain.get_attributes(id, consistent_read=self.consistent)
        if '__type__' in a:
            if not cls or a['__type__'] != cls.__name__:
                cls = find_class(a['__module__'], a['__type__'])
            if cls:
                params = {}
                for prop in cls.properties(hidden=False):
                    if prop.name in a:
                        value = self.decode_value(prop, a[prop.name])
                        value = prop.make_value_from_datastore(value)
                        params[prop.name] = value
                obj = cls(id, **params)
                obj._loaded = True
            else:
                s = '(%s) class %s.%s not found' % (id, a['__module__'], a['__type__'])
                boto.log.info('sdbmanager: %s' % s)
        return obj

    def get_object_from_id(self, id):
        return self.get_object(None, id)

    def query(self, query):
        query_str = "select * from `%s` %s" % (self.domain.name, self._build_filter_part(query.model_class, query.filters, query.sort_by, query.select))
        if query.limit:
            query_str += " limit %s" % query.limit
        rs = self.domain.select(query_str, max_items=query.limit, next_token=query.next_token)
        query.rs = rs
        return self._object_lister(query.model_class, rs)

    def count(self, cls, filters, quick=True, sort_by=None, select=None):
        """
        Get the number of results that would
        be returned in this query
        """
        query = "select count(*) from `%s` %s" % (self.domain.name, self._build_filter_part(cls, filters, sort_by, select))
        count = 0
        for row in self.domain.select(query):
            count += int(row['Count'])
            if quick:
                return count
        return count

    def _build_filter(self, property, name, op, val):
        if name == "__id__":
            name = 'itemName()'
        if name != "itemName()":
            name = '`%s`' % name
        if val is None:
            if op in ('is', '='):
                return "%(name)s is null" % {"name": name}
            elif op in ('is not', '!='):
                return "%s is not null" % name
            else:
                val = ""
        if property.__class__ == ListProperty:
            if op in ("is", "="):
                op = "like"
            elif op in ("!=", "not"):
                op = "not like"
            if not(op in ["like", "not like"] and val.startswith("%")):
                val = "%%:%s" % val
        return "%s %s '%s'" % (name, op, val.replace("'", "''"))

    def _build_filter_part(self, cls, filters, order_by=None, select=None):
        """
        Build the filter part
        """
        import types
        query_parts = []

        order_by_filtered = False

        if order_by:
            if order_by[0] == "-":
                order_by_method = "DESC"
                order_by = order_by[1:]
            else:
                order_by_method = "ASC"

        if select:
            if order_by and order_by in select:
                order_by_filtered = True
            query_parts.append("(%s)" % select)

        if isinstance(filters, six.string_types):
            query = "WHERE %s AND `__type__` = '%s'" % (filters, cls.__name__)
            if order_by in ["__id__", "itemName()"]:
                query += " ORDER BY itemName() %s" % order_by_method
            elif order_by is not None:
                query += " ORDER BY `%s` %s" % (order_by, order_by_method)
            return query

        for filter in filters:
            filter_parts = []
            filter_props = filter[0]
            if not isinstance(filter_props, list):
                filter_props = [filter_props]
            for filter_prop in filter_props:
                (name, op) = filter_prop.strip().split(" ", 1)
                value = filter[1]
                property = cls.find_property(name)
                if name == order_by:
                    order_by_filtered = True
                if types.TypeType(value) == list:
                    filter_parts_sub = []
                    for val in value:
                        val = self.encode_value(property, val)
                        if isinstance(val, list):
                            for v in val:
                                filter_parts_sub.append(self._build_filter(property, name, op, v))
                        else:
                            filter_parts_sub.append(self._build_filter(property, name, op, val))
                    filter_parts.append("(%s)" % (" OR ".join(filter_parts_sub)))
                else:
                    val = self.encode_value(property, value)
                    if isinstance(val, list):
                        for v in val:
                            filter_parts.append(self._build_filter(property, name, op, v))
                    else:
                        filter_parts.append(self._build_filter(property, name, op, val))
            query_parts.append("(%s)" % (" or ".join(filter_parts)))


        type_query = "(`__type__` = '%s'" % cls.__name__
        for subclass in self._get_all_decendents(cls).keys():
            type_query += " or `__type__` = '%s'" % subclass
        type_query += ")"
        query_parts.append(type_query)

        order_by_query = ""

        if order_by:
            if not order_by_filtered:
                query_parts.append("`%s` LIKE '%%'" % order_by)
            if order_by in ["__id__", "itemName()"]:
                order_by_query = " ORDER BY itemName() %s" % order_by_method
            else:
                order_by_query = " ORDER BY `%s` %s" % (order_by, order_by_method)

        if len(query_parts) > 0:
            return "WHERE %s %s" % (" AND ".join(query_parts), order_by_query)
        else:
            return ""


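    # Illustrative sketch of the clause built by _build_filter_part above (not
    # executed here): for a hypothetical model class Task with no subclasses,
    # filters like [("name =", "foo")] and order_by "-name" yield roughly
    #
    #   WHERE (`name` = 'foo') AND (`__type__` = 'Task') ORDER BY `name` DESC
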
    def _get_all_decendents(self, cls):
        """Get all descendants for a given class"""
        decendents = {}
        for sc in cls.__sub_classes__:
            decendents[sc.__name__] = sc
            decendents.update(self._get_all_decendents(sc))
        return decendents

    def query_gql(self, query_string, *args, **kwds):
        raise NotImplementedError("GQL queries not supported in SimpleDB")

    def save_object(self, obj, expected_value=None):
        if not obj.id:
            obj.id = str(uuid.uuid4())

        attrs = {'__type__': obj.__class__.__name__,
                 '__module__': obj.__class__.__module__,
                 '__lineage__': obj.get_lineage()}
        del_attrs = []
        for property in obj.properties(hidden=False):
            value = property.get_value_for_datastore(obj)
            if value is not None:
                value = self.encode_value(property, value)
            if value == []:
                value = None
            if value is None:
                del_attrs.append(property.name)
                continue
            attrs[property.name] = value
            if property.unique:
                try:
                    args = {property.name: value}
                    obj2 = next(obj.find(**args))
                    if obj2.id != obj.id:
                        raise SDBPersistenceError("Error: %s must be unique!" % property.name)
                except(StopIteration):
                    pass
        # Convert the Expected value to SDB format
        if expected_value:
            prop = obj.find_property(expected_value[0])
            v = expected_value[1]
            if v is not None and not isinstance(v, bool):
                v = self.encode_value(prop, v)
            expected_value[1] = v
        self.domain.put_attributes(obj.id, attrs, replace=True, expected_value=expected_value)
        if len(del_attrs) > 0:
            self.domain.delete_attributes(obj.id, del_attrs)
        return obj

    def delete_object(self, obj):
        self.domain.delete_attributes(obj.id)

    def set_property(self, prop, obj, name, value):
        setattr(obj, name, value)
        value = prop.get_value_for_datastore(obj)
        value = self.encode_value(prop, value)
        if prop.unique:
            try:
                args = {prop.name: value}
                obj2 = next(obj.find(**args))
                if obj2.id != obj.id:
                    raise SDBPersistenceError("Error: %s must be unique!" % prop.name)
            except(StopIteration):
                pass
        self.domain.put_attributes(obj.id, {name: value}, replace=True)

    def get_property(self, prop, obj, name):
        a = self.domain.get_attributes(obj.id, consistent_read=self.consistent)

        # try to get the attribute value from SDB
        if name in a:
            value = self.decode_value(prop, a[name])
            value = prop.make_value_from_datastore(value)
            setattr(obj, prop.name, value)
            return value
        raise AttributeError('%s not found' % name)

    def set_key_value(self, obj, name, value):
        self.domain.put_attributes(obj.id, {name: value}, replace=True)

    def delete_key_value(self, obj, name):
        self.domain.delete_attributes(obj.id, name)

    def get_key_value(self, obj, name):
        a = self.domain.get_attributes(obj.id, name, consistent_read=self.consistent)
        if name in a:
            return a[name]
        else:
            return None

    def get_raw_item(self, obj):
        return self.domain.get_item(obj.id)