comparison planemo/lib/python3.7/site-packages/boto/dynamodb2/items.py @ 0:d30785e31577 draft

"planemo upload commit 6eee67778febed82ddd413c3ca40b3183a3898f1"
author guerler
date Fri, 31 Jul 2020 00:18:57 -0400
parents
children
comparison
equal deleted inserted replaced
-1:000000000000 0:d30785e31577
1 from copy import deepcopy
2
3
class NEWVALUE(object):
    # Sentinel type used to flag fields that had no previous value
    # (e.g. newly-added data) when diffing item state.
    pass
7
8
class Item(object):
    """
    An object representing the item data within a DynamoDB table.

    An item is largely schema-free, meaning it can contain any data. The only
    limitation is that it must have data for the fields in the ``Table``'s
    schema.

    This object presents a dictionary-like interface for accessing/storing
    data. It also tries to intelligently track how data has changed throughout
    the life of the instance, to be as efficient as possible about updates.

    Note that, unlike a ``dict``, iterating over an ``Item`` yields its
    field *values*, not its keys; membership tests (``in``) check keys.

    Empty items, or items that have no data, are considered falsey.

    """
24 def __init__(self, table, data=None, loaded=False):
25 """
26 Constructs an (unsaved) ``Item`` instance.
27
28 To persist the data in DynamoDB, you'll need to call the ``Item.save``
29 (or ``Item.partial_save``) on the instance.
30
31 Requires a ``table`` parameter, which should be a ``Table`` instance.
32 This is required, as DynamoDB's API is focus around all operations
33 being table-level. It's also for persisting schema around many objects.
34
35 Optionally accepts a ``data`` parameter, which should be a dictionary
36 of the fields & values of the item. Alternatively, an ``Item`` instance
37 may be provided from which to extract the data.
38
39 Optionally accepts a ``loaded`` parameter, which should be a boolean.
40 ``True`` if it was preexisting data loaded from DynamoDB, ``False`` if
41 it's new data from the user. Default is ``False``.
42
43 Example::
44
45 >>> users = Table('users')
46 >>> user = Item(users, data={
47 ... 'username': 'johndoe',
48 ... 'first_name': 'John',
49 ... 'date_joined': 1248o61592,
50 ... })
51
52 # Change existing data.
53 >>> user['first_name'] = 'Johann'
54 # Add more data.
55 >>> user['last_name'] = 'Doe'
56 # Delete data.
57 >>> del user['date_joined']
58
59 # Iterate over all the data.
60 >>> for field, val in user.items():
61 ... print "%s: %s" % (field, val)
62 username: johndoe
63 first_name: John
64 date_joined: 1248o61592
65
66 """
67 self.table = table
68 self._loaded = loaded
69 self._orig_data = {}
70 self._data = data
71 self._dynamizer = table._dynamizer
72
73 if isinstance(self._data, Item):
74 self._data = self._data._data
75 if self._data is None:
76 self._data = {}
77
78 if self._loaded:
79 self._orig_data = deepcopy(self._data)
80
81 def __getitem__(self, key):
82 return self._data.get(key, None)
83
    def __setitem__(self, key, value):
        # Plain assignment; change tracking happens lazily by diffing
        # ``_data`` against the ``_orig_data`` snapshot, so no bookkeeping
        # is needed here.
        self._data[key] = value
86
87 def __delitem__(self, key):
88 if not key in self._data:
89 return
90
91 del self._data[key]
92
    def keys(self):
        """Return a dict-style view of the item's field names."""
        return self._data.keys()
95
    def values(self):
        """Return a dict-style view of the item's field values."""
        return self._data.values()
98
    def items(self):
        """Return a dict-style view of ``(field, value)`` pairs."""
        return self._data.items()
101
102 def get(self, key, default=None):
103 return self._data.get(key, default)
104
105 def __iter__(self):
106 for key in self._data:
107 yield self._data[key]
108
    def __contains__(self, key):
        # Membership tests check field *names* (keys), even though
        # iteration yields values.
        return key in self._data
111
112 def __bool__(self):
113 return bool(self._data)
114
115 __nonzero__ = __bool__
116
117 def _determine_alterations(self):
118 """
119 Checks the ``-orig_data`` against the ``_data`` to determine what
120 changes to the data are present.
121
122 Returns a dictionary containing the keys ``adds``, ``changes`` &
123 ``deletes``, containing the updated data.
124 """
125 alterations = {
126 'adds': {},
127 'changes': {},
128 'deletes': [],
129 }
130
131 orig_keys = set(self._orig_data.keys())
132 data_keys = set(self._data.keys())
133
134 # Run through keys we know are in both for changes.
135 for key in orig_keys.intersection(data_keys):
136 if self._data[key] != self._orig_data[key]:
137 if self._is_storable(self._data[key]):
138 alterations['changes'][key] = self._data[key]
139 else:
140 alterations['deletes'].append(key)
141
142 # Run through additions.
143 for key in data_keys.difference(orig_keys):
144 if self._is_storable(self._data[key]):
145 alterations['adds'][key] = self._data[key]
146
147 # Run through deletions.
148 for key in orig_keys.difference(data_keys):
149 alterations['deletes'].append(key)
150
151 return alterations
152
153 def needs_save(self, data=None):
154 """
155 Returns whether or not the data has changed on the ``Item``.
156
157 Optionally accepts a ``data`` argument, which accepts the output from
158 ``self._determine_alterations()`` if you've already called it. Typically
159 unnecessary to do. Default is ``None``.
160
161 Example:
162
163 >>> user.needs_save()
164 False
165 >>> user['first_name'] = 'Johann'
166 >>> user.needs_save()
167 True
168
169 """
170 if data is None:
171 data = self._determine_alterations()
172
173 needs_save = False
174
175 for kind in ['adds', 'changes', 'deletes']:
176 if len(data[kind]):
177 needs_save = True
178 break
179
180 return needs_save
181
182 def mark_clean(self):
183 """
184 Marks an ``Item`` instance as no longer needing to be saved.
185
186 Example:
187
188 >>> user.needs_save()
189 False
190 >>> user['first_name'] = 'Johann'
191 >>> user.needs_save()
192 True
193 >>> user.mark_clean()
194 >>> user.needs_save()
195 False
196
197 """
198 self._orig_data = deepcopy(self._data)
199
200 def mark_dirty(self):
201 """
202 DEPRECATED: Marks an ``Item`` instance as needing to be saved.
203
204 This method is no longer necessary, as the state tracking on ``Item``
205 has been improved to automatically detect proper state.
206 """
207 return
208
209 def load(self, data):
210 """
211 This is only useful when being handed raw data from DynamoDB directly.
212 If you have a Python datastructure already, use the ``__init__`` or
213 manually set the data instead.
214
215 Largely internal, unless you know what you're doing or are trying to
216 mix the low-level & high-level APIs.
217 """
218 self._data = {}
219
220 for field_name, field_value in data.get('Item', {}).items():
221 self[field_name] = self._dynamizer.decode(field_value)
222
223 self._loaded = True
224 self._orig_data = deepcopy(self._data)
225
226 def get_keys(self):
227 """
228 Returns a Python-style dict of the keys/values.
229
230 Largely internal.
231 """
232 key_fields = self.table.get_key_fields()
233 key_data = {}
234
235 for key in key_fields:
236 key_data[key] = self[key]
237
238 return key_data
239
240 def get_raw_keys(self):
241 """
242 Returns a DynamoDB-style dict of the keys/values.
243
244 Largely internal.
245 """
246 raw_key_data = {}
247
248 for key, value in self.get_keys().items():
249 raw_key_data[key] = self._dynamizer.encode(value)
250
251 return raw_key_data
252
    def build_expects(self, fields=None):
        """
        Builds up a list of expectations to hand off to DynamoDB on save.

        Returns a dict mapping each field name to an ``Exists`` flag and,
        where a prior value is known, the encoded ``Value`` DynamoDB should
        verify before writing.

        Optionally accepts a ``fields`` iterable restricting which fields
        get expectations; defaults to the union of the current & original
        field names. Raises ``ValueError`` for a field unknown to both.

        Largely internal.
        """
        expects = {}

        if fields is None:
            fields = list(self._data.keys()) + list(self._orig_data.keys())

        # Only uniques.
        fields = set(fields)

        for key in fields:
            expects[key] = {
                'Exists': True,
            }
            value = None

            # Check for invalid keys.
            if not key in self._orig_data and not key in self._data:
                raise ValueError("Unknown key %s provided." % key)

            # States:
            # * New field (only in _data)
            # * Unchanged field (in both _data & _orig_data, same data)
            # * Modified field (in both _data & _orig_data, different data)
            # * Deleted field (only in _orig_data)
            # ``NEWVALUE`` is a sentinel so a stored ``None`` can't be
            # confused with "field absent".
            orig_value = self._orig_data.get(key, NEWVALUE)
            current_value = self._data.get(key, NEWVALUE)

            if orig_value == current_value:
                # Existing field unchanged.
                value = current_value
            else:
                if key in self._data:
                    if not key in self._orig_data:
                        # New field.
                        expects[key]['Exists'] = False
                    else:
                        # Existing field modified.
                        value = orig_value
                else:
                    # Existing field deleted.
                    value = orig_value

            # New fields (and ``None`` values) carry no prior value for
            # DynamoDB to verify, so no ``Value`` entry is emitted.
            if value is not None:
                expects[key]['Value'] = self._dynamizer.encode(value)

        return expects
304
305 def _is_storable(self, value):
306 # We need to prevent ``None``, empty string & empty set from
307 # heading to DDB, but allow false-y values like 0 & False make it.
308 if not value:
309 if not value in (0, 0.0, False):
310 return False
311
312 return True
313
314 def prepare_full(self):
315 """
316 Runs through all fields & encodes them to be handed off to DynamoDB
317 as part of an ``save`` (``put_item``) call.
318
319 Largely internal.
320 """
321 # This doesn't save on its own. Rather, we prepare the datastructure
322 # and hand-off to the table to handle creation/update.
323 final_data = {}
324
325 for key, value in self._data.items():
326 if not self._is_storable(value):
327 continue
328
329 final_data[key] = self._dynamizer.encode(value)
330
331 return final_data
332
333 def prepare_partial(self):
334 """
335 Runs through **ONLY** the changed/deleted fields & encodes them to be
336 handed off to DynamoDB as part of an ``partial_save`` (``update_item``)
337 call.
338
339 Largely internal.
340 """
341 # This doesn't save on its own. Rather, we prepare the datastructure
342 # and hand-off to the table to handle creation/update.
343 final_data = {}
344 fields = set()
345 alterations = self._determine_alterations()
346
347 for key, value in alterations['adds'].items():
348 final_data[key] = {
349 'Action': 'PUT',
350 'Value': self._dynamizer.encode(self._data[key])
351 }
352 fields.add(key)
353
354 for key, value in alterations['changes'].items():
355 final_data[key] = {
356 'Action': 'PUT',
357 'Value': self._dynamizer.encode(self._data[key])
358 }
359 fields.add(key)
360
361 for key in alterations['deletes']:
362 final_data[key] = {
363 'Action': 'DELETE',
364 }
365 fields.add(key)
366
367 return final_data, fields
368
369 def partial_save(self):
370 """
371 Saves only the changed data to DynamoDB.
372
373 Extremely useful for high-volume/high-write data sets, this allows
374 you to update only a handful of fields rather than having to push
375 entire items. This prevents many accidental overwrite situations as
376 well as saves on the amount of data to transfer over the wire.
377
378 Returns ``True`` on success, ``False`` if no save was performed or
379 the write failed.
380
381 Example::
382
383 >>> user['last_name'] = 'Doh!'
384 # Only the last name field will be sent to DynamoDB.
385 >>> user.partial_save()
386
387 """
388 key = self.get_keys()
389 # Build a new dict of only the data we're changing.
390 final_data, fields = self.prepare_partial()
391
392 if not final_data:
393 return False
394
395 # Remove the key(s) from the ``final_data`` if present.
396 # They should only be present if this is a new item, in which
397 # case we shouldn't be sending as part of the data to update.
398 for fieldname, value in key.items():
399 if fieldname in final_data:
400 del final_data[fieldname]
401
402 try:
403 # It's likely also in ``fields``, so remove it there too.
404 fields.remove(fieldname)
405 except KeyError:
406 pass
407
408 # Build expectations of only the fields we're planning to update.
409 expects = self.build_expects(fields=fields)
410 returned = self.table._update_item(key, final_data, expects=expects)
411 # Mark the object as clean.
412 self.mark_clean()
413 return returned
414
415 def save(self, overwrite=False):
416 """
417 Saves all data to DynamoDB.
418
419 By default, this attempts to ensure that none of the underlying
420 data has changed. If any fields have changed in between when the
421 ``Item`` was constructed & when it is saved, this call will fail so
422 as not to cause any data loss.
423
424 If you're sure possibly overwriting data is acceptable, you can pass
425 an ``overwrite=True``. If that's not acceptable, you may be able to use
426 ``Item.partial_save`` to only write the changed field data.
427
428 Optionally accepts an ``overwrite`` parameter, which should be a
429 boolean. If you provide ``True``, the item will be forcibly overwritten
430 within DynamoDB, even if another process changed the data in the
431 meantime. (Default: ``False``)
432
433 Returns ``True`` on success, ``False`` if no save was performed.
434
435 Example::
436
437 >>> user['last_name'] = 'Doh!'
438 # All data on the Item is sent to DynamoDB.
439 >>> user.save()
440
441 # If it fails, you can overwrite.
442 >>> user.save(overwrite=True)
443
444 """
445 if not self.needs_save() and not overwrite:
446 return False
447
448 final_data = self.prepare_full()
449 expects = None
450
451 if overwrite is False:
452 # Build expectations about *all* of the data.
453 expects = self.build_expects()
454
455 returned = self.table._put_item(final_data, expects=expects)
456 # Mark the object as clean.
457 self.mark_clean()
458 return returned
459
460 def delete(self):
461 """
462 Deletes the item's data to DynamoDB.
463
464 Returns ``True`` on success.
465
466 Example::
467
468 # Buh-bye now.
469 >>> user.delete()
470
471 """
472 key_data = self.get_keys()
473 return self.table.delete_item(**key_data)