Mercurial > repos > shellac > guppy_basecaller

comparison: env/lib/python3.7/site-packages/galaxy/util/dbkeys.py @ 0:26e78fe6e8c4 draft
"planemo upload commit c699937486c35866861690329de38ec1a5d9f783"

author:   shellac
date:     Sat, 02 May 2020 07:14:21 -0400
parents:  (none)
children: (none)
1 """ | |
2 Functionality for dealing with dbkeys. | |
3 """ | |
4 # dbkeys read from disk using builds.txt | |
5 from __future__ import absolute_import | |
6 | |
7 import os.path | |
8 from json import loads | |
9 | |
10 from galaxy.util import read_dbnames | |
11 from galaxy.util.object_wrapper import sanitize_lists_to_string | |
12 | |
13 | |
14 class GenomeBuilds(object): | |
15 default_value = "?" | |
16 default_name = "unspecified (?)" | |
17 | |
18 def __init__(self, app, data_table_name="__dbkeys__", load_old_style=True): | |
19 self._app = app | |
20 self._data_table_name = data_table_name | |
21 self._static_chrom_info_path = app.config.len_file_path | |
22 # A dbkey can be listed multiple times, but with different names, so we can't use dictionaries for lookups | |
23 if load_old_style: | |
24 self._static_dbkeys = list(read_dbnames(app.config.builds_file_path)) | |
25 else: | |
26 self._static_dbkeys = [] | |
27 | |
28 def get_genome_build_names(self, trans=None): | |
29 # FIXME: how to deal with key duplicates? | |
30 rval = [] | |
31 # load user custom genome builds | |
32 if trans is not None: | |
33 if trans.history: | |
                # This is a little bit odd. We are adding every .len file in the current history to the dbkey list,
                # but this is previous behavior from trans.db_names, so we'll continue to do it.
                # It does allow one-off, history-specific dbkeys to be created by a user. But we are not filtering,
                # so a len file will be listed twice (as the build name and again as the dataset name)
                # if custom dbkey creation/conversion occurred within the current history.
                datasets = trans.sa_session.query(self._app.model.HistoryDatasetAssociation) \
                                           .filter_by(deleted=False, history_id=trans.history.id, extension="len")
                for dataset in datasets:
                    rval.append((dataset.dbkey, "%s (%s) [History]" % (dataset.name, dataset.dbkey)))
            user = trans.user
            if user and hasattr(user, 'preferences') and 'dbkeys' in user.preferences:
                user_keys = loads(user.preferences['dbkeys'])
                for key, chrom_dict in user_keys.items():
                    rval.append((key, "%s (%s) [Custom]" % (chrom_dict['name'], key)))
        # Load old builds.txt static keys
        rval.extend(self._static_dbkeys)
        # load dbkeys from dbkey data table
        dbkey_table = self._app.tool_data_tables.get(self._data_table_name, None)
        if dbkey_table is not None:
            for field_dict in dbkey_table.get_named_fields_list():
                rval.append((field_dict['value'], field_dict['name']))
        return rval

    def get_chrom_info(self, dbkey, trans=None, custom_build_hack_get_len_from_fasta_conversion=True):
        # FIXME: flag to turn off custom_build_hack_get_len_from_fasta_conversion should not be required
        chrom_info = None
        db_dataset = None
        # Collect chromInfo from custom builds
        if trans:
            db_dataset = trans.db_dataset_for(dbkey)
            if db_dataset:
                chrom_info = db_dataset.file_name
            else:
                # Do Custom Build handling
                if trans.user and ('dbkeys' in trans.user.preferences) and (dbkey in loads(trans.user.preferences['dbkeys'])):
                    custom_build_dict = loads(trans.user.preferences['dbkeys'])[dbkey]
                    # HACK: the attempt to get chrom_info below will trigger the
                    # fasta-to-len converter if the len dataset is not yet available,
                    # which will in turn create a recursive loop when
                    # running the fasta-to-len tool. So, use a hack in the second
                    # condition below to avoid getting chrom_info when running the
                    # fasta-to-len converter.
                    if 'fasta' in custom_build_dict and custom_build_hack_get_len_from_fasta_conversion:
                        # Build is defined by fasta; get len file, which is obtained from converting fasta.
                        build_fasta_dataset = trans.sa_session.query(trans.app.model.HistoryDatasetAssociation).get(custom_build_dict['fasta'])
                        chrom_info = build_fasta_dataset.get_converted_dataset(trans, 'len').file_name
                    elif 'len' in custom_build_dict:
                        # Build is defined by len file, so use it.
                        chrom_info = trans.sa_session.query(trans.app.model.HistoryDatasetAssociation).get(custom_build_dict['len']).file_name
        # Check data table
        if not chrom_info:
            dbkey_table = self._app.tool_data_tables.get(self._data_table_name, None)
            if dbkey_table is not None:
                chrom_info = dbkey_table.get_entry('value', dbkey, 'len_path', default=None)
        # use configured server len path
        if not chrom_info:
            # Default to built-in build.
            # Since we are using an unverified dbkey, we will sanitize the dbkey before use
            chrom_info = os.path.join(self._static_chrom_info_path, "%s.len" % sanitize_lists_to_string(dbkey))
        chrom_info = os.path.abspath(chrom_info)
        return (chrom_info, db_dataset)
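
For orientation, the sketch below shows one way GenomeBuilds can be exercised outside a running Galaxy server. The stub application object is hypothetical and assumed for illustration only (a real Galaxy app supplies config, tool_data_tables, and model); it provides just the attributes the class touches when no trans is passed and load_old_style is False.

# Minimal usage sketch (not part of dbkeys.py). The stub app below is an
# assumption for illustration; a real Galaxy application object provides
# these attributes.
import os

from galaxy.util.dbkeys import GenomeBuilds


class _StubConfig(object):
    len_file_path = os.path.join("/tmp", "len")  # directory of <dbkey>.len files (assumed path)
    builds_file_path = None                      # unused because load_old_style=False below


class _StubApp(object):
    config = _StubConfig()
    tool_data_tables = {}  # a plain dict supports the .get(name, None) lookup used above
    model = None           # only needed when a trans/history is supplied


genomes = GenomeBuilds(_StubApp(), load_old_style=False)

# With no trans and an empty __dbkeys__ table, no build names are found.
print(genomes.get_genome_build_names())   # -> []

# With no custom build or data-table entry, chrom info falls back to the
# sanitized static path: ('/tmp/len/hg19.len', None).
print(genomes.get_chrom_info("hg19"))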