env/lib/python3.7/site-packages/bioblend/galaxy/objects/wrappers.py @ 0:26e78fe6e8c4 draft

"planemo upload commit c699937486c35866861690329de38ec1a5d9f783"
author shellac
date Sat, 02 May 2020 07:14:21 -0400

# pylint: disable=W0622,E1101

"""
A basic object-oriented interface for Galaxy entities.
"""

import abc
import json
try:
    from collections.abc import Iterable, Mapping, Sequence
except ImportError:
    # Python 2 fallback; these ABCs moved to collections.abc in Python 3.3
    from collections import Iterable, Mapping, Sequence

import six

import bioblend

__all__ = (
    'Wrapper',
    'Step',
    'Workflow',
    'ContentInfo',
    'LibraryContentInfo',
    'HistoryContentInfo',
    'DatasetContainer',
    'History',
    'Library',
    'Folder',
    'Dataset',
    'HistoryDatasetAssociation',
    'DatasetCollection',
    'HistoryDatasetCollectionAssociation',
    'LibraryDatasetDatasetAssociation',
    'LibraryDataset',
    'Tool',
    'Job',
    'Preview',
    'LibraryPreview',
    'HistoryPreview',
    'WorkflowPreview',
    'JobPreview',
)


@six.add_metaclass(abc.ABCMeta)
class Wrapper(object):
    """
    Abstract base class for Galaxy entity wrappers.

    Wrapper instances wrap deserialized JSON dictionaries such as the
    ones obtained by the Galaxy web API, converting key-based access to
    attribute-based access (e.g., ``library['name'] -> library.name``).

    Dict keys that are converted to attributes are listed in the
    ``BASE_ATTRS`` class variable: this is the 'stable' interface.
    Note that the wrapped dictionary is accessible via the ``wrapped``
    attribute.
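
    For instance (a minimal sketch, assuming ``lib_dict`` is a library
    dictionary as returned by the web API)::

        lib = Library(lib_dict)
        lib.name           # same value as lib.wrapped['name']
        lib.wrapped['id']  # the raw dictionary is still available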
    """
    BASE_ATTRS = ('id', 'name')

    @abc.abstractmethod
    def __init__(self, wrapped, parent=None, gi=None):
        """
        :type wrapped: dict
        :param wrapped: JSON-serializable dictionary

        :type parent: :class:`Wrapper`
        :param parent: the parent of this wrapper

        :type gi: :class:`GalaxyInstance`
        :param gi: the GalaxyInstance through which we can access this wrapper
        """
        if not isinstance(wrapped, Mapping):
            raise TypeError('wrapped object must be a mapping type')
        # loads(dumps(x)) is a bit faster than deepcopy and allows type checks
        try:
            dumped = json.dumps(wrapped)
        except (TypeError, ValueError):
            raise ValueError('wrapped object must be JSON-serializable')
        object.__setattr__(self, 'wrapped', json.loads(dumped))
        for k in self.BASE_ATTRS:
            object.__setattr__(self, k, self.wrapped.get(k))
        object.__setattr__(self, '_cached_parent', parent)
        object.__setattr__(self, 'is_modified', False)
        object.__setattr__(self, 'gi', gi)

    @abc.abstractproperty
    def gi_module(self):
        """
        The GalaxyInstance module that deals with objects of this type.
        """
        pass

    @property
    def parent(self):
        """
        The parent of this wrapper.
        """
        return self._cached_parent

    @property
    def is_mapped(self):
        """
        ``True`` if this wrapper is mapped to an actual Galaxy entity.
        """
        return self.id is not None

    def unmap(self):
        """
        Disconnect this wrapper from Galaxy.
        """
        object.__setattr__(self, 'id', None)

    def clone(self):
        """
        Return an independent copy of this wrapper.
        """
        return self.__class__(self.wrapped)

    def touch(self):
        """
        Mark this wrapper as having been modified since its creation.
        """
        object.__setattr__(self, 'is_modified', True)
        if self.parent:
            self.parent.touch()

    def to_json(self):
        """
        Return a JSON dump of this wrapper.
        """
        return json.dumps(self.wrapped)

    @classmethod
    def from_json(cls, jdef):
        """
        Build a new wrapper from a JSON dump.
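
        Together with :meth:`.to_json`, this allows round-tripping a
        wrapper through a plain string (a sketch, assuming ``hist`` is a
        :class:`History` wrapper; note that the copy is not connected to
        a Galaxy instance)::

            dump = hist.to_json()
            clone = History.from_json(dump)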
        """
        return cls(json.loads(jdef))

    # FIXME: things like self.x[0] = 'y' do NOT call self.__setattr__
    def __setattr__(self, name, value):
        if name not in self.wrapped:
            raise AttributeError("can't set attribute")
        else:
            self.wrapped[name] = value
            object.__setattr__(self, name, value)
            self.touch()

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.wrapped)


class Step(Wrapper):
    """
    Abstract base class for workflow steps.

    Steps are the main building blocks of a Galaxy workflow. A step can be: an
    input (type ``data_collection_input`` or ``data_input``), a computational
    tool (type ``tool``) or a pause (type ``pause``).
    """
    BASE_ATTRS = Wrapper.BASE_ATTRS + (
        'input_steps', 'tool_id', 'tool_inputs', 'tool_version', 'type'
    )

    def __init__(self, step_dict, parent):
        super(Step, self).__init__(step_dict, parent=parent, gi=parent.gi)
        try:
            stype = step_dict['type']
        except KeyError:
            raise ValueError('not a step dict')
        if stype not in {'data_collection_input', 'data_input', 'pause',
                         'tool'}:
            raise ValueError('Unknown step type: %r' % stype)
        if self.type == 'tool' and self.tool_inputs:
            for k, v in six.iteritems(self.tool_inputs):
                # In Galaxy before release_17.05, v is a JSON-encoded string
                if not isinstance(v, six.string_types):
                    break
                try:
                    self.tool_inputs[k] = json.loads(v)
                except ValueError:
                    break

    @property
    def gi_module(self):
        return self.gi.workflows


class Workflow(Wrapper):
    """
    Workflows represent ordered sequences of computations on Galaxy.

    A workflow defines a sequence of steps that produce one or more
    results from an input dataset.
    """
    BASE_ATTRS = Wrapper.BASE_ATTRS + (
        'deleted', 'inputs', 'published', 'steps', 'tags'
    )
    POLLING_INTERVAL = 10  # for output state monitoring

    def __init__(self, wf_dict, gi=None):
        super(Workflow, self).__init__(wf_dict, gi=gi)
        missing_ids = []
        if gi:
            # a set, for fast membership tests below
            available_tool_ids = set(t.id for t in gi.tools.get_previews())
        else:
            available_tool_ids = set()
        tool_labels_to_ids = {}
        for k, v in six.iteritems(self.steps):
            # convert step ids to str for consistency with outer keys
            v['id'] = str(v['id'])
            for i in six.itervalues(v['input_steps']):
                i['source_step'] = str(i['source_step'])
            step = Step(v, self)
            self.steps[k] = step
            if step.type == 'tool':
                if not step.tool_inputs or step.tool_id not in available_tool_ids:
                    missing_ids.append(k)
                tool_labels_to_ids.setdefault(step.tool_id, set()).add(step.id)
        input_labels_to_ids = {}
        for id_, d in six.iteritems(self.inputs):
            input_labels_to_ids.setdefault(d['label'], set()).add(id_)
        object.__setattr__(self, 'input_labels_to_ids', input_labels_to_ids)
        object.__setattr__(self, 'tool_labels_to_ids', tool_labels_to_ids)
        dag, inv_dag = self._get_dag()
        heads, tails = set(dag), set(inv_dag)
        object.__setattr__(self, 'dag', dag)
        object.__setattr__(self, 'inv_dag', inv_dag)
        object.__setattr__(self, 'source_ids', heads - tails)
        assert set(self.inputs) == self.data_collection_input_ids | self.data_input_ids, \
            "inputs is %r, while data_collection_input_ids is %r and data_input_ids is %r" % (self.inputs, self.data_collection_input_ids, self.data_input_ids)
        object.__setattr__(self, 'sink_ids', tails - heads)
        object.__setattr__(self, 'missing_ids', missing_ids)

    @property
    def gi_module(self):
        return self.gi.workflows

    def _get_dag(self):
        """
        Return the workflow's DAG.

        For convenience, this method computes a 'direct' (step =>
        successors) and an 'inverse' (step => predecessors)
        representation of the same DAG.

        For instance, a workflow with a single tool *c*, two inputs
        *a, b* and three outputs *d, e, f* is represented by (direct)::

            {'a': {'c'}, 'b': {'c'}, 'c': {'d', 'e', 'f'}}

        and by (inverse)::

            {'c': {'a', 'b'}, 'd': {'c'}, 'e': {'c'}, 'f': {'c'}}
        """
        dag, inv_dag = {}, {}
        for s in six.itervalues(self.steps):
            for i in six.itervalues(s.input_steps):
                head, tail = i['source_step'], s.id
                dag.setdefault(head, set()).add(tail)
                inv_dag.setdefault(tail, set()).add(head)
        return dag, inv_dag

    def sorted_step_ids(self):
        """
        Return a topological sort of the workflow's DAG.
        """
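        # Kahn's algorithm: repeatedly emit a source (a step with no
        # remaining predecessors) and remove its outgoing edges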
        ids = []
        source_ids = self.source_ids.copy()
        inv_dag = {k: v.copy() for k, v in six.iteritems(self.inv_dag)}
        while source_ids:
            head = source_ids.pop()
            ids.append(head)
            for tail in self.dag.get(head, []):
                incoming = inv_dag[tail]
                incoming.remove(head)
                if not incoming:
                    source_ids.add(tail)
        return ids

    @property
    def data_input_ids(self):
        """
        Return the ids of data input steps for this workflow.
        """
        return {id_ for id_, s in six.iteritems(self.steps)
                if s.type == 'data_input'}

    @property
    def data_collection_input_ids(self):
        """
        Return the ids of data collection input steps for this workflow.
        """
        return {id_ for id_, s in six.iteritems(self.steps)
                if s.type == 'data_collection_input'}

    @property
    def tool_ids(self):
        """
        Return the ids of tool steps for this workflow.
        """
        return {id_ for id_, s in six.iteritems(self.steps)
                if s.type == 'tool'}

    @property
    def input_labels(self):
        """
        Return the labels of this workflow's input steps.
        """
        return set(self.input_labels_to_ids)

    @property
    def is_runnable(self):
        """
        Return True if the workflow can be run on Galaxy.

        A workflow is considered runnable on a Galaxy instance if all
        of the tools it uses are installed in that instance.
        """
        return not self.missing_ids

    def convert_input_map(self, input_map):
        """
        Convert ``input_map`` to the format required by the Galaxy web API.

        :type input_map: dict
        :param input_map: a mapping from input labels to datasets

        :rtype: dict
        :return: a mapping from input slot ids to dataset ids in the
            format required by the Galaxy web API.
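
        For instance (a sketch, with made-up step and dataset ids)::

            wf.convert_input_map({'input1': hda})
            # => {'1': {'id': 'a1b2c3', 'src': 'hda'}}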
        """
        m = {}
        for label, slot_ids in six.iteritems(self.input_labels_to_ids):
            datasets = input_map.get(label, [])
            if not isinstance(datasets, Iterable):
                datasets = [datasets]
            if len(datasets) < len(slot_ids):
                raise RuntimeError('not enough datasets for "%s"' % label)
            for id_, ds in zip(slot_ids, datasets):
                m[id_] = {'id': ds.id, 'src': ds.SRC}
        return m

    def preview(self):
        getf = self.gi.workflows.get_previews
        try:
            p = [_ for _ in getf(published=True) if _.id == self.id][0]
        except IndexError:
            raise ValueError('no object for id %s' % self.id)
        return p

    def run(self, input_map=None, history='', params=None, import_inputs=False,
            replacement_params=None, wait=False,
            polling_interval=POLLING_INTERVAL, break_on_error=True):
        """
        Run the workflow in the current Galaxy instance.

        :type input_map: dict
        :param input_map: a mapping from workflow input labels to
            datasets, e.g.: ``dict(zip(workflow.input_labels,
            library.get_datasets()))``

        :type history: :class:`History` or str
        :param history: either a valid history object (results will be
            stored there) or a string (a new history will be created with
            the given name).

        :type params: dict
        :param params: a mapping of non-dataset tool parameters (see below)

        :type import_inputs: bool
        :param import_inputs: If ``True``, workflow inputs will be imported
            into the history; if ``False``, only workflow outputs will be
            visible in the history.

        :type replacement_params: dict
        :param replacement_params: pattern-based replacements for
            post-job actions (see the docs for
            :meth:`~bioblend.galaxy.workflows.WorkflowClient.invoke_workflow`)

        :type wait: bool
        :param wait: whether to wait while the returned datasets are
            in a pending state

        :type polling_interval: float
        :param polling_interval: polling interval in seconds

        :type break_on_error: bool
        :param break_on_error: whether to break as soon as at least one
            of the returned datasets is in the 'error' state

        :rtype: tuple
        :return: list of output datasets, output history

        The ``params`` dict should be specified as follows::

            {STEP_ID: PARAM_DICT, ...}

        where PARAM_DICT is::

            {PARAM_NAME: VALUE, ...}

        For backwards compatibility, the following (deprecated) format is
        also supported for ``params``::

            {TOOL_ID: PARAM_DICT, ...}

        in which case PARAM_DICT affects all steps with the given tool id.
        If both by-tool-id and by-step-id specifications are used, the
        latter takes precedence.

        Finally (again, for backwards compatibility), PARAM_DICT can also
        be specified as::

            {'param': PARAM_NAME, 'value': VALUE}

        Note that this format allows only one parameter to be set per step.

        Example: set 'a' to 1 for the third workflow step::

            params = {workflow.steps[2].id: {'a': 1}}

        .. warning::

            This is a blocking operation that can take a very long time. If
            ``wait`` is set to ``False``, the method will return as soon as
            the workflow has been *scheduled*, otherwise it will wait until
            the workflow has been *run*. With a large number of steps,
            however, the delay may not be negligible even in the former case
            (e.g. minutes for 100 steps).
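
        A minimal usage sketch (assuming ``gi`` is an objects
        :class:`GalaxyInstance`, ``hda`` is an input dataset wrapper, and
        the workflow has a single input labelled ``input``)::

            hist = gi.histories.create(name='run results')
            outputs, out_hist = workflow.run({'input': hda}, hist, wait=True)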
        """
        if not self.is_mapped:
            raise RuntimeError('workflow is not mapped to a Galaxy object')
        if not self.is_runnable:
            raise RuntimeError('workflow has missing tools: %s' % ', '.join(
                '%s[%s]' % (self.steps[_].tool_id, _)
                for _ in self.missing_ids))
        kwargs = {
            'dataset_map': self.convert_input_map(input_map or {}),
            'params': params,
            'import_inputs_to_history': import_inputs,
            'replacement_params': replacement_params,
        }
        if isinstance(history, History):
            try:
                kwargs['history_id'] = history.id
            except AttributeError:
                raise RuntimeError('history does not have an id')
        elif isinstance(history, six.string_types):
            kwargs['history_name'] = history
        else:
            raise TypeError(
                'history must be either a history wrapper or a string')
        res = self.gi.gi.workflows.run_workflow(self.id, **kwargs)
        # res structure: {'history': HIST_ID, 'outputs': [CI_ID, CI_ID, ...]}
        out_hist = self.gi.histories.get(res['history'])
        content_infos_dict = {}
        for ci in out_hist.content_infos:
            content_infos_dict[ci.id] = ci
        outputs = []
        for output_id in res['outputs']:
            if content_infos_dict[output_id].type == 'file':
                outputs.append(out_hist.get_dataset(output_id))
            elif content_infos_dict[output_id].type == 'collection':
                outputs.append(out_hist.get_dataset_collection(output_id))

        if wait:
            self.gi._wait_datasets(outputs, polling_interval=polling_interval,
                                   break_on_error=break_on_error)
        return outputs, out_hist

    def export(self):
        """
        Export a re-importable representation of the workflow.

        :rtype: dict
        :return: a JSON-serializable dump of the workflow
        """
        return self.gi.gi.workflows.export_workflow_dict(self.id)

    def delete(self):
        """
        Delete this workflow.

        .. warning::
            Deleting a workflow is irreversible - all of the data from
            the workflow will be permanently deleted.
        """
        self.gi.workflows.delete(id_=self.id)
        self.unmap()


@six.add_metaclass(abc.ABCMeta)
class Dataset(Wrapper):
    """
    Abstract base class for Galaxy datasets.
    """
    BASE_ATTRS = Wrapper.BASE_ATTRS + (
        'data_type', 'file_ext', 'file_name', 'file_size', 'genome_build', 'misc_info', 'state'
    )
    POLLING_INTERVAL = 1  # for state monitoring

    @abc.abstractmethod
    def __init__(self, ds_dict, container, gi=None):
        super(Dataset, self).__init__(ds_dict, gi=gi)
        object.__setattr__(self, 'container', container)

    @property
    def container_id(self):
        """
        Deprecated property.

        Id of the dataset container. Use :attr:`.container.id` instead.
        """
        return self.container.id

    @abc.abstractproperty
    def _stream_url(self):
        """
        Return the URL to stream this dataset.
        """
        pass

    def get_stream(self, chunk_size=bioblend.CHUNK_SIZE):
        """
        Open dataset for reading and return an iterator over its contents.

        :type chunk_size: int
        :param chunk_size: read this amount of bytes at a time
        """
        kwargs = {'stream': True}
        if isinstance(self, LibraryDataset):
            # 'ld_ids%5B%5D' is the URL-encoded form of 'ld_ids[]'
            kwargs['params'] = {'ld_ids%5B%5D': self.id}
        r = self.gi.gi.make_get_request(self._stream_url, **kwargs)
        if isinstance(self, LibraryDataset) and r.status_code == 500:
            # compatibility with older Galaxy releases
            kwargs['params'] = {'ldda_ids%5B%5D': self.id}
            r = self.gi.gi.make_get_request(self._stream_url, **kwargs)
        r.raise_for_status()
        return r.iter_content(chunk_size)  # FIXME: client can't close r

    def peek(self, chunk_size=bioblend.CHUNK_SIZE):
        """
        Open dataset for reading and return the first chunk.

        See :meth:`.get_stream` for param info.
        """
        try:
            return next(self.get_stream(chunk_size=chunk_size))
        except StopIteration:
            return b''

    def download(self, file_object, chunk_size=bioblend.CHUNK_SIZE):
        """
        Open dataset for reading and save its contents to ``file_object``.

        :type file_object: file
        :param file_object: output file object

        See :meth:`.get_stream` for info on other params.
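
        For instance (a sketch; the local file name is arbitrary)::

            with open('dataset.dat', 'wb') as f:
                hda.download(f)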
        """
        for chunk in self.get_stream(chunk_size=chunk_size):
            file_object.write(chunk)

    def get_contents(self, chunk_size=bioblend.CHUNK_SIZE):
        """
        Open dataset for reading and return its **full** contents.

        See :meth:`.get_stream` for param info.
        """
        return b''.join(self.get_stream(chunk_size=chunk_size))

    def refresh(self):
        """
        Re-fetch the attributes pertaining to this object.

        Returns: self
        """
        gi_client = getattr(self.gi.gi, self.container.API_MODULE)
        ds_dict = gi_client.show_dataset(self.container.id, self.id)
        self.__init__(ds_dict, self.container, self.gi)
        return self

    def wait(self, polling_interval=POLLING_INTERVAL, break_on_error=True):
        """
        Wait for this dataset to come out of the pending states.

        :type polling_interval: float
        :param polling_interval: polling interval in seconds

        :type break_on_error: bool
        :param break_on_error: if ``True``, raise a RuntimeError exception if
            the dataset ends in the 'error' state.

        .. warning::

            This is a blocking operation that can take a very long time. Also,
            note that this method does not return anything; however, this
            dataset is refreshed (possibly multiple times) during the
            execution.
        """
        self.gi._wait_datasets([self], polling_interval=polling_interval,
                               break_on_error=break_on_error)


class HistoryDatasetAssociation(Dataset):
    """
    Maps to a Galaxy ``HistoryDatasetAssociation``.
    """
    BASE_ATTRS = Dataset.BASE_ATTRS + ('annotation', 'deleted', 'purged', 'tags', 'visible')
    SRC = 'hda'

    def __init__(self, ds_dict, container, gi=None):
        super(HistoryDatasetAssociation, self).__init__(
            ds_dict, container, gi=gi)

    @property
    def gi_module(self):
        return self.gi.histories

    @property
    def _stream_url(self):
        base_url = self.gi.gi._make_url(
            self.gi.gi.histories, module_id=self.container.id, contents=True)
        return "%s/%s/display" % (base_url, self.id)

    def update(self, **kwds):
        """
        Update this history dataset metadata. Some of the attributes that can
        be modified are documented below.

        :type name: str
        :param name: Replace history dataset name with the given string

        :type genome_build: str
        :param genome_build: Replace history dataset genome build (dbkey)

        :type annotation: str
        :param annotation: Replace history dataset annotation with given string

        :type deleted: bool
        :param deleted: Mark or unmark history dataset as deleted

        :type visible: bool
        :param visible: Mark or unmark history dataset as visible
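
        For instance (a sketch)::

            hda.update(name='sample 1 (trimmed)', visible=False)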
        """
        res = self.gi.gi.histories.update_dataset(self.container.id, self.id, **kwds)
        # Refresh also the history because the dataset may have been (un)deleted
        self.container.refresh()
        if 'id' in res:
            self.__init__(res, self.container, gi=self.gi)
        else:
            # for Galaxy < release_15.03 res contains only the updated fields
            self.refresh()
        return self

    def delete(self, purge=False):
        """
        Delete this history dataset.

        :type purge: bool
        :param purge: if ``True``, also purge (permanently delete) the dataset

        .. note::
            For the purge option to work, the Galaxy instance must have the
            ``allow_user_dataset_purge`` option set to ``true`` in the
            ``config/galaxy.yml`` configuration file.

        .. warning::
            If you purge a dataset which has not been previously deleted,
            Galaxy from release_15.03 to release_17.01 does not set the
            ``deleted`` attribute of the dataset to ``True``, see
            https://github.com/galaxyproject/galaxy/issues/3548
        """
        self.gi.gi.histories.delete_dataset(self.container.id, self.id, purge=purge)
        self.container.refresh()
        self.refresh()


@six.add_metaclass(abc.ABCMeta)
class DatasetCollection(Wrapper):
    """
    Abstract base class for Galaxy dataset collections.
    """
    BASE_ATTRS = Wrapper.BASE_ATTRS + (
        'state', 'deleted', 'collection_type'
    )

    @abc.abstractmethod
    def __init__(self, dsc_dict, container, gi=None):
        super(DatasetCollection, self).__init__(dsc_dict, gi=gi)
        object.__setattr__(self, 'container', container)

    def refresh(self):
        """
        Re-fetch the attributes pertaining to this object.

        Returns: self
        """
        gi_client = getattr(self.gi.gi, self.container.API_MODULE)
        dsc_dict = gi_client.show_dataset_collection(self.container.id, self.id)
        self.__init__(dsc_dict, self.container, self.gi)
        return self


class HistoryDatasetCollectionAssociation(DatasetCollection):
    """
    Maps to a Galaxy ``HistoryDatasetCollectionAssociation``.
    """
    BASE_ATTRS = DatasetCollection.BASE_ATTRS + ('tags', 'visible', 'elements')
    SRC = 'hdca'

    def __init__(self, dsc_dict, container, gi=None):
        super(HistoryDatasetCollectionAssociation, self).__init__(
            dsc_dict, container, gi=gi)

    @property
    def gi_module(self):
        return self.gi.histories

    def delete(self):
        """
        Delete this dataset collection.
        """
        self.gi.gi.histories.delete_dataset_collection(self.container.id, self.id)
        self.container.refresh()
        self.refresh()


class LibRelatedDataset(Dataset):
    """
    Base class for LibraryDatasetDatasetAssociation and LibraryDataset classes.
    """

    def __init__(self, ds_dict, container, gi=None):
        super(LibRelatedDataset, self).__init__(ds_dict, container, gi=gi)

    @property
    def gi_module(self):
        return self.gi.libraries

    @property
    def _stream_url(self):
        base_url = self.gi.gi._make_url(self.gi.gi.libraries)
        return "%s/datasets/download/uncompressed" % base_url


class LibraryDatasetDatasetAssociation(LibRelatedDataset):
    """
    Maps to a Galaxy ``LibraryDatasetDatasetAssociation``.
    """
    BASE_ATTRS = LibRelatedDataset.BASE_ATTRS + ('deleted',)
    SRC = 'ldda'


class LibraryDataset(LibRelatedDataset):
    """
    Maps to a Galaxy ``LibraryDataset``.
    """
    SRC = 'ld'

    def delete(self, purged=False):
        """
        Delete this library dataset.

        :type purged: bool
        :param purged: if ``True``, also purge (permanently delete) the dataset
        """
        self.gi.gi.libraries.delete_library_dataset(
            self.container.id, self.id, purged=purged)
        self.container.refresh()
        self.refresh()

    def update(self, **kwds):
        """
        Update this library dataset metadata. Some of the attributes that can
        be modified are documented below.

        :type name: str
        :param name: Replace library dataset name with the given string

        :type genome_build: str
        :param genome_build: Replace library dataset genome build (dbkey)
        """
        res = self.gi.gi.libraries.update_library_dataset(self.id, **kwds)
        self.container.refresh()
        self.__init__(res, self.container, gi=self.gi)
        return self


@six.add_metaclass(abc.ABCMeta)
class ContentInfo(Wrapper):
    """
    Instances of this class wrap dictionaries obtained by getting
    ``/api/{histories,libraries}/<ID>/contents`` from Galaxy.
    """
    BASE_ATTRS = Wrapper.BASE_ATTRS + ('type',)

    @abc.abstractmethod
    def __init__(self, info_dict, gi=None):
        super(ContentInfo, self).__init__(info_dict, gi=gi)


class LibraryContentInfo(ContentInfo):
    """
    Instances of this class wrap dictionaries obtained by getting
    ``/api/libraries/<ID>/contents`` from Galaxy.
    """
    def __init__(self, info_dict, gi=None):
        super(LibraryContentInfo, self).__init__(info_dict, gi=gi)

    @property
    def gi_module(self):
        return self.gi.libraries


class HistoryContentInfo(ContentInfo):
    """
    Instances of this class wrap dictionaries obtained by getting
    ``/api/histories/<ID>/contents`` from Galaxy.
    """
    BASE_ATTRS = ContentInfo.BASE_ATTRS + ('deleted', 'state', 'visible')

    def __init__(self, info_dict, gi=None):
        super(HistoryContentInfo, self).__init__(info_dict, gi=gi)

    @property
    def gi_module(self):
        return self.gi.histories


@six.add_metaclass(abc.ABCMeta)
class DatasetContainer(Wrapper):
    """
    Abstract base class for dataset containers (histories and libraries).
    """
    BASE_ATTRS = Wrapper.BASE_ATTRS + ('deleted',)

    @abc.abstractmethod
    def __init__(self, c_dict, content_infos=None, gi=None):
        """
        :type content_infos: list of :class:`ContentInfo`
        :param content_infos: info objects for the container's contents
        """
        super(DatasetContainer, self).__init__(c_dict, gi=gi)
        if content_infos is None:
            content_infos = []
        object.__setattr__(self, 'content_infos', content_infos)

    @property
    def dataset_ids(self):
        """
        Return the ids of the contained datasets.
        """
        return [_.id for _ in self.content_infos if _.type == 'file']

    def preview(self):
        getf = self.gi_module.get_previews
        # self.state could be stale: check both regular and deleted containers
        try:
            p = [_ for _ in getf() if _.id == self.id][0]
        except IndexError:
            try:
                p = [_ for _ in getf(deleted=True) if _.id == self.id][0]
            except IndexError:
                raise ValueError('no object for id %s' % self.id)
        return p

    def refresh(self):
        """
        Re-fetch the attributes pertaining to this object.

        Returns: self
        """
        fresh = self.gi_module.get(self.id)
        self.__init__(
            fresh.wrapped, content_infos=fresh.content_infos, gi=self.gi)
        return self

    def get_dataset(self, ds_id):
        """
        Retrieve the dataset corresponding to the given id.

        :type ds_id: str
        :param ds_id: dataset id

        :rtype: :class:`~.HistoryDatasetAssociation` or
            :class:`~.LibraryDataset`
        :return: the dataset corresponding to ``ds_id``
        """
        gi_client = getattr(self.gi.gi, self.API_MODULE)
        ds_dict = gi_client.show_dataset(self.id, ds_id)
        return self.DS_TYPE(ds_dict, self, gi=self.gi)

    def get_datasets(self, name=None):
        """
        Get all datasets contained inside this dataset container.

        :type name: str
        :param name: return only datasets with this name

        :rtype: list of :class:`~.HistoryDatasetAssociation` or list of
            :class:`~.LibraryDataset`
        :return: datasets with the given name contained inside this
            container

        .. note::

            When filtering library datasets by name, specify their full
            paths starting from the library's root folder, e.g.,
            ``/seqdata/reads.fastq``. Full paths are available through
            the ``content_infos`` attribute of :class:`~.Library` objects.
        """
        if name is None:
            ds_ids = self.dataset_ids
        else:
            ds_ids = [_.id for _ in self.content_infos if _.name == name]
        return [self.get_dataset(_) for _ in ds_ids]


class History(DatasetContainer):
    """
    Maps to a Galaxy history.
    """
    BASE_ATTRS = DatasetContainer.BASE_ATTRS + ('annotation', 'published', 'state', 'state_ids', 'state_details', 'tags')
    DS_TYPE = HistoryDatasetAssociation
    DSC_TYPE = HistoryDatasetCollectionAssociation
    CONTENT_INFO_TYPE = HistoryContentInfo
    API_MODULE = 'histories'

    def __init__(self, hist_dict, content_infos=None, gi=None):
        super(History, self).__init__(
            hist_dict, content_infos=content_infos, gi=gi)

    @property
    def gi_module(self):
        return self.gi.histories

    def update(self, **kwds):
        """
        Update history metadata information. Some of the attributes that can
        be modified are documented below.

        :type name: str
        :param name: Replace history name with the given string

        :type annotation: str
        :param annotation: Replace history annotation with the given string

        :type deleted: bool
        :param deleted: Mark or unmark history as deleted

        :type purged: bool
        :param purged: If True, mark history as purged (permanently deleted).
            Ignored on Galaxy release_15.01 and earlier

        :type published: bool
        :param published: Mark or unmark history as published

        :type importable: bool
        :param importable: Mark or unmark history as importable

        :type tags: list
        :param tags: Replace history tags with the given list
        """
        # TODO: wouldn't it be better if name and annotation were attributes?
        self.gi.gi.histories.update_history(self.id, **kwds)
        self.refresh()
        return self

    def delete(self, purge=False):
        """
        Delete this history.

        :type purge: bool
        :param purge: if ``True``, also purge (permanently delete) the history

        .. note::
            For the purge option to work, the Galaxy instance must have the
            ``allow_user_dataset_purge`` option set to ``true`` in the
            ``config/galaxy.yml`` configuration file.
        """
        self.gi.histories.delete(id_=self.id, purge=purge)
        try:
            self.refresh()
        except Exception:
            # Galaxy release_15.01 and earlier requires passing 'deleted=False'
            # when getting the details of a deleted history
            pass
        self.unmap()

    def import_dataset(self, lds):
        """
        Import a dataset into the history from a library.

        :type lds: :class:`~.LibraryDataset`
        :param lds: the library dataset to import

        :rtype: :class:`~.HistoryDatasetAssociation`
        :return: the imported history dataset
        """
        if not self.is_mapped:
            raise RuntimeError('history is not mapped to a Galaxy object')
        if not isinstance(lds, LibraryDataset):
            raise TypeError('lds is not a LibraryDataset')
        res = self.gi.gi.histories.upload_dataset_from_library(self.id, lds.id)
        if not isinstance(res, Mapping):
            raise RuntimeError(
                'upload_dataset_from_library: unexpected reply: %r' % res)
        self.refresh()
        return self.get_dataset(res['id'])

    def upload_file(self, path, **kwargs):
        """
        Upload the file specified by ``path`` to this history.

        :type path: str
        :param path: path of the file to upload

        See :meth:`~bioblend.galaxy.tools.ToolClient.upload_file` for
        the optional parameters.

        :rtype: :class:`~.HistoryDatasetAssociation`
        :return: the uploaded dataset
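
        For instance (a sketch; the path and file type are made up)::

            hda = history.upload_file('/tmp/reads.fastq',
                                      file_type='fastqsanger')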
        """
        out_dict = self.gi.gi.tools.upload_file(path, self.id, **kwargs)
        self.refresh()
        return self.get_dataset(out_dict['outputs'][0]['id'])

    upload_dataset = upload_file

    def upload_from_ftp(self, path, **kwargs):
        """
        Upload the file specified by ``path`` from the user's FTP directory to
        this history.

        :type path: str
        :param path: path of the file in the user's FTP directory

        See :meth:`~bioblend.galaxy.tools.ToolClient.upload_file` for
        the optional parameters.

        :rtype: :class:`~.HistoryDatasetAssociation`
        :return: the uploaded dataset
        """
        out_dict = self.gi.gi.tools.upload_from_ftp(path, self.id, **kwargs)
        self.refresh()
        return self.get_dataset(out_dict['outputs'][0]['id'])

    def paste_content(self, content, **kwargs):
        """
        Upload a string to a new dataset in this history.

        :type content: str
        :param content: content of the new dataset to upload

        See :meth:`~bioblend.galaxy.tools.ToolClient.upload_file` for
        the optional parameters (except ``file_name``).

        :rtype: :class:`~.HistoryDatasetAssociation`
        :return: the uploaded dataset
        """
        out_dict = self.gi.gi.tools.paste_content(content, self.id, **kwargs)
        self.refresh()
        return self.get_dataset(out_dict['outputs'][0]['id'])

    def export(self, gzip=True, include_hidden=False, include_deleted=False,
               wait=False, maxwait=None):
        """
        Start a job to create an export archive for this history. See
        :meth:`~bioblend.galaxy.histories.HistoryClient.export_history`
        for parameter and return value info.
        """
        return self.gi.gi.histories.export_history(
            self.id, gzip=gzip, include_hidden=include_hidden,
            include_deleted=include_deleted, wait=wait, maxwait=maxwait)

    def download(self, jeha_id, outf, chunk_size=bioblend.CHUNK_SIZE):
        """
        Download an export archive for this history. Use :meth:`export`
        to create an export and get the required ``jeha_id``. See
        :meth:`~bioblend.galaxy.histories.HistoryClient.download_history`
        for parameter and return value info.
        """
        return self.gi.gi.histories.download_history(
            self.id, jeha_id, outf, chunk_size=chunk_size)

    def create_dataset_collection(self, collection_description):
        """
        Create a new dataset collection in the history by providing a
        collection description.

        :type collection_description: bioblend.galaxy.dataset_collections.CollectionDescription
        :param collection_description: a description of the dataset collection

        :rtype: :class:`~.HistoryDatasetCollectionAssociation`
        :return: the new dataset collection
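
        A sketch of building a description from existing history datasets
        (assuming ``hda1`` and ``hda2`` are
        :class:`~.HistoryDatasetAssociation` objects)::

            from bioblend.galaxy.dataset_collections import (
                CollectionDescription, HistoryDatasetElement)
            desc = CollectionDescription(
                name='my collection',
                elements=[
                    HistoryDatasetElement(name='sample1', id=hda1.id),
                    HistoryDatasetElement(name='sample2', id=hda2.id),
                ])
            hdca = history.create_dataset_collection(desc)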
        """
        dataset_collection = self.gi.gi.histories.create_dataset_collection(self.id, collection_description)
        self.refresh()
        return self.get_dataset_collection(dataset_collection['id'])

    def get_dataset_collection(self, dsc_id):
        """
        Retrieve the dataset collection corresponding to the given id.

        :type dsc_id: str
        :param dsc_id: dataset collection id

        :rtype: :class:`~.HistoryDatasetCollectionAssociation`
        :return: the dataset collection corresponding to ``dsc_id``
        """
        dsc_dict = self.gi.gi.histories.show_dataset_collection(self.id, dsc_id)
        return self.DSC_TYPE(dsc_dict, self, gi=self.gi)


class Library(DatasetContainer):
    """
    Maps to a Galaxy library.
    """
    BASE_ATTRS = DatasetContainer.BASE_ATTRS + ('description', 'synopsis')
    DS_TYPE = LibraryDataset
    CONTENT_INFO_TYPE = LibraryContentInfo
    API_MODULE = 'libraries'

    def __init__(self, lib_dict, content_infos=None, gi=None):
        super(Library, self).__init__(
            lib_dict, content_infos=content_infos, gi=gi)

    @property
    def gi_module(self):
        return self.gi.libraries

    @property
    def folder_ids(self):
        """
        Return the ids of the contained folders.
        """
        return [_.id for _ in self.content_infos if _.type == 'folder']

    def delete(self):
        """
        Delete this library.
        """
        self.gi.libraries.delete(id_=self.id)
        self.refresh()
        self.unmap()

    def _pre_upload(self, folder):
        """
        Return the id of the given folder, after sanity checking.
        """
        if not self.is_mapped:
            raise RuntimeError('library is not mapped to a Galaxy object')
        return None if folder is None else folder.id

    def upload_data(self, data, folder=None, **kwargs):
        """
        Upload data to this library.

        :type data: str
        :param data: dataset contents

        :type folder: :class:`~.Folder`
        :param folder: a folder object, or ``None`` to upload to the root
            folder

        :rtype: :class:`~.LibraryDataset`
        :return: the dataset object that represents the uploaded content

        Optional keyword arguments: ``file_type``, ``dbkey``.
        """
        fid = self._pre_upload(folder)
        res = self.gi.gi.libraries.upload_file_contents(
            self.id, data, folder_id=fid, **kwargs)
        self.refresh()
        return self.get_dataset(res[0]['id'])

    def upload_from_url(self, url, folder=None, **kwargs):
        """
        Upload data to this library from the given URL.

        :type url: str
        :param url: URL from which data should be read

        See :meth:`.upload_data` for info on other params.
        """
        fid = self._pre_upload(folder)
        res = self.gi.gi.libraries.upload_file_from_url(
            self.id, url, folder_id=fid, **kwargs)
        self.refresh()
        return self.get_dataset(res[0]['id'])

    def upload_from_local(self, path, folder=None, **kwargs):
        """
        Upload data to this library from a local file.

        :type path: str
        :param path: local file path from which data should be read

        See :meth:`.upload_data` for info on other params.
        """
        fid = self._pre_upload(folder)
        res = self.gi.gi.libraries.upload_file_from_local_path(
            self.id, path, folder_id=fid, **kwargs)
        self.refresh()
        return self.get_dataset(res[0]['id'])

    def upload_from_galaxy_fs(self, paths, folder=None, link_data_only=None, **kwargs):
        """
        Upload data to this library from filesystem paths on the server.

        .. note::
            For this method to work, the Galaxy instance must have the
            ``allow_path_paste`` (``allow_library_path_paste`` in Galaxy
            ``release_17.05`` and earlier) option set to ``true`` in the
            ``config/galaxy.yml`` configuration file.

        :type paths: str or :class:`~collections.abc.Iterable` of str
        :param paths: server-side file paths from which data should be read

        :type link_data_only: str
        :param link_data_only: either 'copy_files' (default) or
            'link_to_files'. Setting to 'link_to_files' symlinks instead of
            copying the files

        :rtype: list of :class:`~.LibraryDataset`
        :return: the dataset objects that represent the uploaded content

        See :meth:`.upload_data` for info on other params.
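
        For instance (a sketch; the server-side paths are made up)::

            lds = library.upload_from_galaxy_fs(
                ['/data/reads_1.fastq', '/data/reads_2.fastq'],
                link_data_only='link_to_files')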
        """
        fid = self._pre_upload(folder)
        if isinstance(paths, six.string_types):
            paths = (paths,)
        paths = '\n'.join(paths)
        res = self.gi.gi.libraries.upload_from_galaxy_filesystem(
            self.id, paths, folder_id=fid, link_data_only=link_data_only,
            **kwargs)
        if res is None:
            raise RuntimeError('upload_from_galaxy_filesystem: no reply')
        if not isinstance(res, Sequence):
            raise RuntimeError(
                'upload_from_galaxy_filesystem: unexpected reply: %r' % res)
        new_datasets = [
            self.get_dataset(ds_info['id']) for ds_info in res
        ]
        self.refresh()
        return new_datasets

    def copy_from_dataset(self, hda, folder=None, message=''):
        """
        Copy a history dataset into this library.

        :type hda: :class:`~.HistoryDatasetAssociation`
        :param hda: history dataset to copy into the library

        See :meth:`.upload_data` for info on other params.
        """
        fid = self._pre_upload(folder)
        res = self.gi.gi.libraries.copy_from_dataset(
            self.id, hda.id, folder_id=fid, message=message)
        self.refresh()
        return self.get_dataset(res['library_dataset_id'])

    def create_folder(self, name, description=None, base_folder=None):
        """
        Create a folder in this library.

        :type name: str
        :param name: folder name

        :type description: str
        :param description: optional folder description

        :type base_folder: :class:`~.Folder`
        :param base_folder: parent folder, or ``None`` to create in the root
            folder

        :rtype: :class:`~.Folder`
        :return: the folder just created
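
        For instance (a sketch building a nested layout)::

            top = library.create_folder('seqdata')
            sub = library.create_folder('reads', base_folder=top)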
        """
        bfid = None if base_folder is None else base_folder.id
        res = self.gi.gi.libraries.create_folder(
            self.id, name, description=description, base_folder_id=bfid)
        self.refresh()
        return self.get_folder(res[0]['id'])

    def get_folder(self, f_id):
        """
        Retrieve the folder corresponding to the given id.

        :rtype: :class:`~.Folder`
        :return: the folder corresponding to ``f_id``
        """
        f_dict = self.gi.gi.libraries.show_folder(self.id, f_id)
        return Folder(f_dict, self, gi=self.gi)

    @property
    def root_folder(self):
        """
        The root folder of this library.

        :rtype: :class:`~.Folder`
        :return: the root folder of this library
        """
        return self.get_folder(self.gi.gi.libraries._get_root_folder_id(self.id))


class Folder(Wrapper):
    """
    Maps to a folder in a Galaxy library.
    """
    BASE_ATTRS = Wrapper.BASE_ATTRS + ('description', 'deleted', 'item_count')

    def __init__(self, f_dict, container, gi=None):
        super(Folder, self).__init__(f_dict, gi=gi)
        object.__setattr__(self, 'container', container)

    @property
    def parent(self):
        """
        The parent folder of this folder. The parent of the root folder is
        ``None``.

        :rtype: :class:`~.Folder`
        :return: the parent of this folder
        """
        if self._cached_parent is None:
            object.__setattr__(self,
                               '_cached_parent',
                               self._get_parent())
        return self._cached_parent

    def _get_parent(self):
        """
        Return the parent folder of this folder.
        """
        # Galaxy release_13.04 and earlier does not have parent_id in the
        # folder dictionary; support could be added by searching for the
        # folder with the correct name
        if 'parent_id' not in self.wrapped:
            raise NotImplementedError('This method has not been implemented for Galaxy release_13.04 and earlier')
        parent_id = self.wrapped['parent_id']
        if parent_id is None:
            return None
        # Galaxy from release_14.02 to release_15.01 returns a dummy parent_id
        # for the root folder instead of None, so check if this is the root
        if self.id == self.gi.gi.libraries._get_root_folder_id(self.container.id):
            return None
        # Galaxy release_13.11 and earlier returns a parent_id without the
        # initial 'F'
        if not parent_id.startswith('F'):
            parent_id = 'F' + parent_id
        return self.container.get_folder(parent_id)

    @property
    def gi_module(self):
        return self.gi.libraries

    @property
    def container_id(self):
        """
        Deprecated property.

        Id of the folder container. Use :attr:`.container.id` instead.
        """
        return self.container.id

    def refresh(self):
        """
        Re-fetch the attributes pertaining to this object.

        Returns: self
        """
        f_dict = self.gi.gi.libraries.show_folder(self.container.id, self.id)
        self.__init__(f_dict, self.container, gi=self.gi)
        return self


class Tool(Wrapper):
    """
    Maps to a Galaxy tool.
    """
    BASE_ATTRS = Wrapper.BASE_ATTRS + ('version',)
    POLLING_INTERVAL = 10  # for output state monitoring

    def __init__(self, t_dict, gi=None):
        super(Tool, self).__init__(t_dict, gi=gi)

    @property
    def gi_module(self):
        return self.gi.tools

    def run(self, inputs, history, wait=False,
            polling_interval=POLLING_INTERVAL):
        """
        Execute this tool in the given history with inputs from dict
        ``inputs``.

        :type inputs: dict
        :param inputs: dictionary of input datasets and parameters for
            the tool (see below)

        :type history: :class:`History`
        :param history: the history in which to execute the tool

        :type wait: bool
        :param wait: whether to wait while the returned datasets are
            in a pending state

        :type polling_interval: float
        :param polling_interval: polling interval in seconds

        :rtype: list of :class:`HistoryDatasetAssociation`
        :return: list of output datasets

        The ``inputs`` dict should contain input datasets and parameters
        in the (largely undocumented) format used by the Galaxy API.
        Some examples can be found in `Galaxy's API test suite
        <https://github.com/galaxyproject/galaxy/blob/dev/test/api/test_tools.py>`_.
        The value of an input dataset can also be a :class:`Dataset`
        object, which will be automatically converted to the needed
        format.
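
        For instance (a sketch; the parameter names are tool-specific and
        made up here)::

            outputs = tool.run(
                {'input': hda, 'min_quality': '20'}, history, wait=True)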
        """
        for k, v in six.iteritems(inputs):
            if isinstance(v, Dataset):
                inputs[k] = {'src': v.SRC, 'id': v.id}
        out_dict = self.gi.gi.tools.run_tool(history.id, self.id, inputs)
        outputs = [history.get_dataset(_['id']) for _ in out_dict['outputs']]
        if wait:
            self.gi._wait_datasets(outputs, polling_interval=polling_interval)
        return outputs


class Job(Wrapper):
    """
    Maps to a Galaxy job.
    """
    BASE_ATTRS = ('id', 'state')

    def __init__(self, j_dict, gi=None):
        super(Job, self).__init__(j_dict, gi=gi)

    @property
    def gi_module(self):
        return self.gi.jobs


@six.add_metaclass(abc.ABCMeta)
class Preview(Wrapper):
    """
    Abstract base class for Galaxy entity 'previews'.

    Classes derived from this one model the short summaries returned
    by global getters such as ``/api/libraries``.
    """
    BASE_ATTRS = Wrapper.BASE_ATTRS + ('deleted',)

    @abc.abstractmethod
    def __init__(self, pw_dict, gi=None):
        super(Preview, self).__init__(pw_dict, gi=gi)


class LibraryPreview(Preview):
    """
    Models Galaxy library 'previews'.

    Instances of this class wrap dictionaries obtained by getting
    ``/api/libraries`` from Galaxy.
    """
    def __init__(self, pw_dict, gi=None):
        super(LibraryPreview, self).__init__(pw_dict, gi=gi)

    @property
    def gi_module(self):
        return self.gi.libraries


class HistoryPreview(Preview):
    """
    Models Galaxy history 'previews'.

    Instances of this class wrap dictionaries obtained by getting
    ``/api/histories`` from Galaxy.
    """
    BASE_ATTRS = Preview.BASE_ATTRS + ('tags',)

    def __init__(self, pw_dict, gi=None):
        super(HistoryPreview, self).__init__(pw_dict, gi=gi)

    @property
    def gi_module(self):
        return self.gi.histories


class WorkflowPreview(Preview):
    """
    Models Galaxy workflow 'previews'.

    Instances of this class wrap dictionaries obtained by getting
    ``/api/workflows`` from Galaxy.
    """
    BASE_ATTRS = Preview.BASE_ATTRS + ('published', 'tags')

    def __init__(self, pw_dict, gi=None):
        super(WorkflowPreview, self).__init__(pw_dict, gi=gi)

    @property
    def gi_module(self):
        return self.gi.workflows


class JobPreview(Preview):
    """
    Models Galaxy job 'previews'.

    Instances of this class wrap dictionaries obtained by getting
    ``/api/jobs`` from Galaxy.
    """
    BASE_ATTRS = ('id', 'state')

    def __init__(self, pw_dict, gi=None):
        super(JobPreview, self).__init__(pw_dict, gi=gi)

    @property
    def gi_module(self):
        return self.gi.jobs