comparison elink.py @ 3:e267701c187b draft

"planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/tools/ncbi_entrez_eutils commit dae34e5e182b4cceb808d7353080f14aa9a78ca9"
author iuc
date Wed, 23 Sep 2020 09:48:26 +0000
parents 30150bd36c9a
children
comparison
2:c6096cd97120 → 3:e267701c187b
--- elink.py (2:c6096cd97120)
+++ elink.py (3:e267701c187b)
 #!/usr/bin/env python
-from __future__ import print_function
 
 import argparse
 import json
+import logging
+import os
 
 import eutils
+
+
+logging.basicConfig(level=logging.INFO)
 
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(description='EFetch', epilog='')
     parser.add_argument('db', help='Database to use, sometimes "none" (e.g. *check)')
     parser.add_argument('dbfrom', help='Database containing input UIDs')
     parser.add_argument('cmd', choices=['neighbor', 'neighbor_score',
                                         'neighbor_history', 'acheck', 'ncheck', 'lcheck',
                                         'llinks', 'llinkslib', 'prlinks'],
                         help='ELink command mode')
-    # Only used in case of neighbor_history
-    parser.add_argument('--history_out', type=argparse.FileType('w'),
-                        help='Output history file', default='-')
+
+    parser.add_argument('--version', action='version', version=eutils.Client.getVersion(), help='Version (reports Biopython version)')
 
     parser.add_argument('--user_email', help="User email")
     parser.add_argument('--admin_email', help="Admin email")
+
     # ID Sources
+    parser.add_argument('--id_xml', help='list of ids in an xml file as returned by esearch or elink')
+    parser.add_argument('--id_json', help='list of ids in a json file as returned by esearch or elink')
     parser.add_argument('--id_list', help='list of ids')
     parser.add_argument('--id', help='Comma separated individual IDs')
     parser.add_argument('--history_file', help='Fetch results from previous query')
+    parser.add_argument('--history_xml', help='Fetch results from previous query')
+
+    # Optional
+    parser.add_argument('--linkname', help='Restrict results to a specific link source')
+    parser.add_argument('--retmode', choices=['xml', 'json', 'uilist'], help='Output format')
 
     # TODO: dates, linkname, term, holding
     # neighbor or neighbor_history and dbfrom is pubmed
     # parser.add_argument('--datetype', help='Date type')
     # parser.add_argument('--reldate', help='In past N days')
...
 
     # Output
     args = parser.parse_args()
 
     c = eutils.Client(history_file=args.history_file, user_email=args.user_email, admin_email=args.admin_email)
-    merged_ids = c.parse_ids(args.id_list, args.id, args.history_file)
 
     payload = {
         'dbfrom': args.dbfrom,
         'cmd': args.cmd,
     }
-    if args.history_file is not None:
-        payload.update(c.get_history())
-    else:
-        payload['id'] = ','.join(merged_ids)
 
     # DB can be 'none' in a few cases.
     if args.db != "none":
         payload['db'] = args.db
 
-    results = c.link(**payload)
+    if args.linkname is not None:
+        payload['linkname'] = args.linkname
 
-    if args.cmd == "neighbor_history":
-        history = c.extract_history(results)
-        args.history_out.write(json.dumps(history, indent=4))
+    results = []
+    qkeys = []
+    if args.history_file is not None or args.history_xml is not None:
+        payload['retmode'] = args.retmode
+        if args.history_file is not None:
+            input_histories = c.get_histories()
+        else:
+            input_histories = c.extract_histories_from_xml_file(args.history_xml)
+        for hist in input_histories:
+            qkeys += [hist['query_key']]
+            tmp_payload = payload
+            tmp_payload.update(hist)
+            results += [c.link(**tmp_payload)]
+    else:
+        # There is no uilist retmode
+        if args.retmode == "uilist":
+            payload['retmode'] = 'xml'
+        else:
+            payload['retmode'] = args.retmode
+        merged_ids = c.parse_ids(args.id_list, args.id, args.history_file, args.id_xml, args.id_json)
+        payload['id'] = ','.join(merged_ids)
+        qkeys += [1]
+        results += [c.link(**payload)]
 
-    print(results)
+    # There could be multiple sets of results if a history was supplied
+    if args.history_file is not None or args.history_xml is not None:
+        # Multiple result sets can be returned
+        # Create a directory for the output files
+        current_directory = os.getcwd()
+        final_directory = os.path.join(current_directory, r'downloads')
+        if not os.path.exists(final_directory):
+            os.makedirs(final_directory)
+
+        logging.info("Writing files:")
+        # When rettype is uilist, convert to text format (which elink does not do)
+        count = 0
+        if args.retmode == 'uilist':
+            for result in results:
+                qkey = qkeys[count]
+                count += 1
+                ids = c.xmlstring2UIlist(result)
+                file_path = os.path.join('downloads', '%s-querykey%s.tabular' % (args.db, qkey))
+                logging.info('%s.tabular' % (args.db))
+                with open(file_path, 'w') as handle:
+                    for id in ids:
+                        handle.write(id)
+                        handle.write(os.linesep)
+        elif args.retmode == 'json':
+            for result in results:
+                qkey = qkeys[count]
+                count += 1
+                file_path = os.path.join('downloads', '%s-querykey%s.json' % (args.db, qkey))
+                logging.info('%s-link%s.json' % (args.db, count))
+                with open(file_path, 'w') as handle:
+                    json_data = c.jsonstring2jsondata(result)
+                    handle.write(json.dumps(json_data, indent=4))
+        else:
+            for result in results:
+                qkey = qkeys[count]
+                count += 1
+                file_path = os.path.join('downloads', '%s-querykey%s.xml' % (args.db, qkey))
+                logging.info('%s-link%s.xml' % (args.db, count))
+                with open(file_path, 'w') as handle:
+                    handle.write(result)
+    else:
+        # When rettype is uilist, convert to text format (which elink does not do)
+        if args.retmode == 'uilist':
+            ids = c.xmlstring2UIlist(results[0])
+            for id in ids:
+                print(id)
+        elif args.retmode == 'json':
+            json_data = c.jsonstring2jsondata(results[0])
+            print(json.dumps(json_data, indent=4))
+        else:
+            print(results[0])
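
The new uilist handling delegates the actual conversion to the tool's bundled eutils wrapper (c.xmlstring2UIlist()), which turns an ELink XML result into one UID per line for the tabular outputs written above. The snippet below is only a minimal sketch of what that conversion amounts to, assuming the wrapper collects the LinkSetDb/Link/Id values of a standard eLinkResult document; the helper name xml_to_uilist and the sample record are illustrative, not the wrapper's actual code.

import xml.etree.ElementTree as ET


def xml_to_uilist(xml_string):
    """Hypothetical stand-in for eutils.Client.xmlstring2UIlist():
    collect the linked UIDs from an ELink XML result."""
    root = ET.fromstring(xml_string)
    return [link_id.text for link_id in root.findall('.//LinkSetDb/Link/Id')]


# Illustrative ELink result linking one pubmed record to two nuccore records.
example = """<eLinkResult>
  <LinkSet>
    <DbFrom>pubmed</DbFrom>
    <LinkSetDb>
      <DbTo>nuccore</DbTo>
      <LinkName>pubmed_nuccore</LinkName>
      <Link><Id>1001</Id></Link>
      <Link><Id>1002</Id></Link>
    </LinkSetDb>
  </LinkSet>
</eLinkResult>"""

for uid in xml_to_uilist(example):
    print(uid)  # one UID per line, matching the uilist/tabular output above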