iuc / ncbi_eutils_esummary (Mercurial repository)
comparison: esummary.py @ 3:254f40d3ae2b draft
"planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/tools/ncbi_entrez_eutils commit dae34e5e182b4cceb808d7353080f14aa9a78ca9"
author   | iuc
date     | Wed, 23 Sep 2020 09:50:11 +0000
parents  | c8d4ea6376a7
children |
--- 2:cb5a0fe9e036
+++ 3:254f40d3ae2b
 #!/usr/bin/env python
-from __future__ import print_function
 
 import argparse
+import json
+import logging
+import os
 
 import eutils
+
+
+logging.basicConfig(level=logging.INFO)
 
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(description='ESummary', epilog='')
     parser.add_argument('db', help='Database to use')
+    parser.add_argument('--user_email', help="User email")
+    parser.add_argument('--admin_email', help="Admin email")
+
+    parser.add_argument('--version', action='version', version=eutils.Client.getVersion(), help='Version (reports Biopython version)')
+
+    # ID source
+    parser.add_argument('--id_xml', help='list of ids in an xml file as returned by esearch or elink')
+    parser.add_argument('--id_json', help='list of ids in a json file as returned by esearch or elink')
     parser.add_argument('--id_list', help='list of ids')
     parser.add_argument('--id', help='Comma separated individual IDs')
-    parser.add_argument('--history_file', help='Filter existing history')
-    parser.add_argument('--user_email', help="User email")
-    parser.add_argument('--admin_email', help="Admin email")
+    parser.add_argument('--history_file', help='Fetch results from previous query')
+    parser.add_argument('--history_xml', help='Fetch results from previous query')
+
+    # Output
+    parser.add_argument('--retmode', help='Retmode')
+    parser.add_argument('--retstart', type=int, default=0, help='Retstart - Starting rec number (0)')
+    parser.add_argument('--retmax', type=int, default=20, help='Retmax - max number of recs returned (20, max 100000')
+
     args = parser.parse_args()
 
     c = eutils.Client(history_file=args.history_file, user_email=args.user_email, admin_email=args.admin_email)
-
-    merged_ids = c.parse_ids(args.id_list, args.id, args.history_file)
 
     payload = {
         'db': args.db,
     }
 
-    if args.history_file is not None:
-        payload.update(c.get_history())
+    for attr in ('retmode', 'retmax', 'retstart'):
+        if getattr(args, attr, None) is not None:
+            payload[attr] = getattr(args, attr)
+
+    results = []
+    qkeys = []
+    if args.history_file is not None or args.history_xml is not None:
+        payload['retmode'] = args.retmode
+        if args.history_file is not None:
+            input_histories = c.get_histories()
+        else:
+            input_histories = c.extract_histories_from_xml_file(args.history_xml)
+
+        for hist in input_histories:
+            qkeys += [hist['query_key']]
+            tmp_payload = payload
+            tmp_payload.update(hist)
+            results += [c.summary(**tmp_payload)]
     else:
+        # There is no uilist retmode
+        if args.retmode == "uilist":
+            payload['retmode'] = 'xml'
+        else:
+            payload['retmode'] = args.retmode
+        merged_ids = c.parse_ids(args.id_list, args.id, args.history_file, args.id_xml, args.id_json)
         payload['id'] = ','.join(merged_ids)
+        qkeys += [1]
+        results += [c.summary(**payload)]
 
-    print(c.summary(**payload))
+    # There could be multiple sets of results if a history was supplied
+    if args.history_file is not None or args.history_xml is not None:
+        # Multiple result sets can be returned
+        # Create a directory for the output files
+        current_directory = os.getcwd()
+        final_directory = os.path.join(current_directory, r'downloads')
+        if not os.path.exists(final_directory):
+            os.makedirs(final_directory)
+
+        logging.info("Writing files:")
+        count = 0
+        if args.retmode == 'json':
+            for result in results:
+                qkey = qkeys[count]
+                count += 1
+                file_path = os.path.join('downloads', '%s-querykey%s.json' % (args.db, qkey))
+                logging.info('%s-link%s.json' % (args.db, count))
+                with open(file_path, 'w') as handle:
+                    json_data = c.jsonstring2jsondata(result)
+                    handle.write(json.dumps(json_data, indent=4))
+        else:
+            for result in results:
+                qkey = qkeys[count]
+                count += 1
+                file_path = os.path.join('downloads', '%s-querykey%s.xml' % (args.db, qkey))
+                logging.info('%s-link%s.xml' % (args.db, count))
+                with open(file_path, 'w') as handle:
+                    handle.write(result)
+    else:
+        # When rettype is uilist, convert to text format (which elink does not do)
+        if args.retmode == 'json':
+            json_data = c.jsonstring2jsondata(results[0])
+            print(json.dumps(json_data, indent=4))
+        else:
+            print(results[0])
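
For reference, a minimal sketch of how the revised script might be invoked outside Galaxy, based only on the argparse options visible above; the database name, IDs, and email address are hypothetical example values, not anything prescribed by the tool:

    python esummary.py pubmed --id 15718680,11700088 --retmode json --user_email 'user@example.org'

With plain --id / --id_list / --id_xml / --id_json input the summary is printed to standard output, as in the last branch above; when --history_file or --history_xml is supplied instead, one file per query key is written under a downloads/ directory, named '<db>-querykey<N>.json' or '.xml' depending on --retmode.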