0
|
1 #!/usr/bin/env python
|
|
2
|
|
3 # https://github.com/ross/requests-futures
|
|
4 # http://docs.python-requests.org/en/master/user/quickstart/#more-complicated-post-requests
|
|
5
|
|
6 import sys, os, uuid, optparse, requests, json, time
|
|
7 #from requests_futures.sessions import FuturesSession
|
|
8
|
|
9 #### NN14 ####
|
|
10 SERVICE_URL = "http://nn14.galaxyproject.org:8080/";
|
|
11 #service_url = "http://127.0.0.1:8082/";
|
4
|
12 QUERY_URL = SERVICE_URL+"tree/<tree_id>/query";
|
0
|
13 STATUS_URL = SERVICE_URL+"status/<query_id>";
|
|
14 ##############
|
|
15 # query delay in seconds
|
|
16 QUERY_DELAY = 30;
|
|
17 ##############
|
|
18
|
|
19 __version__ = "1.0.0";
|
|
20 VALID_CHARS = '.-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ '
|
|
21 # in the case of collections, exitcodes equal to 0 and 1 are not considered errors
|
|
22 ERR_EXIT_CODE = 2;
|
|
23 OK_EXIT_CODE = 0;
|
|
24
|
|
def raiseException( exitcode, message, output_dir_path, errorfilename ):
    """Write *message* into an error file and terminate the process.

    The file is created inside *output_dir_path* and named
    ``<errorfilename>_txt``; the process then exits with *exitcode*.
    """
    error_path = os.path.join(output_dir_path, errorfilename + "_txt")
    handle = open(error_path, 'w')
    try:
        handle.write(message)
    finally:
        handle.close()
    sys.exit(exitcode)
def query_request( options, args, payload ):
    """Submit *payload* to the remote query endpoint and poll until done.

    On success one tabular file per queried sequence is written into the
    output collection directory and the process exits with OK_EXIT_CODE;
    on failure an error file is produced via raiseException().  Either way
    the process terminates through sys.exit().
    """
    output_dir_path = options.outputdir
    # Attach the search parameters to the JSON payload.
    payload["search_mode"] = str(options.search)
    payload["exact_algorithm"] = int(options.exact)
    payload["search_threshold"] = float(options.sthreshold)
    headers = {'Content-type': 'application/json'}

    # Reuse one HTTP session for the initial POST and all status polls
    # (the original recreated a session per poll, to no benefit).
    session = requests.Session()
    # Make a synchronous POST request to the query route.
    req = session.post(QUERY_URL.replace("<tree_id>", str(options.treeid)), headers=headers, json=payload)
    resp_code = req.status_code
    if resp_code == requests.codes.ok:
        # NOTE(review): str(req.content) assumes Python 2 (str == bytes);
        # under Python 3 this would yield "b'...'" and break json.loads.
        resp_content = str(req.content)
        json_content = json.loads(resp_content)
        # Retrieve the id assigned to this query by the server.
        query_id = json_content['query_id']
        status_query_url = STATUS_URL.replace("<query_id>", query_id)
        json_status_content = None
        # Poll the status route until the query succeeds or fails.
        while True:
            status_req = session.get(status_query_url)
            json_status_content = json.loads(str(status_req.content))
            # The 'state' attribute is always available.
            state = json_status_content['state']
            if state == 'SUCCESS':
                break
            elif state in ['FAILURE', 'REVOKED']:
                return raiseException( ERR_EXIT_CODE, "Query ID: "+str(query_id)+"\nQuery status: "+str(json_status_content['state']), output_dir_path, str(options.errorfile) )
            time.sleep(QUERY_DELAY)  # wait before polling again (seconds)

        out_file_format = "tabular"
        for block in json_status_content['results']:
            seq_id = block['sequence_id']
            # One output file per queried sequence (collection element).
            output_file_path = os.path.join(output_dir_path, seq_id + "_" + out_file_format)
            accessions_list = ""
            hits_block = block['hits']
            accessions_dict = {}
            is_sabutan = False
            for hit in hits_block:
                if isinstance(hit, dict):  # sabutan-style hit: accession + "matched/total" score
                    accession_number = hit['accession_number']
                    score_split = hit['score'].split("/")
                    # Normalise the "matched/total" ratio into a 6-decimal fraction.
                    accessions_dict[accession_number] = "{0:.6f}".format(float(score_split[0]) / float(score_split[1]))
                    is_sabutan = True
                else:  # all-some style hit: plain accession string
                    accessions_list = accessions_list + str(hit) + "\n"
            if is_sabutan:
                # Emit sabutan hits sorted by ascending score.
                sorted_accessions = sorted(accessions_dict, key=lambda acc: float(accessions_dict[acc]))
                for acc in sorted_accessions:
                    accessions_list = accessions_list + str(acc) + "\t" + str(accessions_dict[acc]) + "\n"
            with open(output_file_path, 'w') as out:
                out.write(accessions_list.strip())
        return sys.exit(OK_EXIT_CODE)
    else:
        return raiseException( ERR_EXIT_CODE, "Unable to query the remote server. Please try again in a while.", output_dir_path, str(options.errorfile) )
def _collect_sequences( lines, separator, multiple_data, output_dir_path, errorfile ):
    """Parse (id, sequence) pairs from *lines* into *multiple_data*.

    Each non-empty line is split on *separator*; lines that do not produce
    exactly two fields are skipped.  Ids are restricted to VALID_CHARS.
    Exits via raiseException() when a duplicated id is found.
    """
    for line in lines:
        line = line.strip()
        if line == "":
            continue
        line_split = line.split(separator)
        if len(line_split) == 2:  # 0: id, 1: sequence; otherwise skip the line
            # Keep valid characters only in the sequence id.
            seq_id = ''.join(e for e in line_split[0] if e in VALID_CHARS)
            if seq_id in multiple_data:
                return raiseException( ERR_EXIT_CODE, "Error: the id '"+seq_id+"' is duplicated", output_dir_path, errorfile )
            multiple_data[seq_id] = line_split[1]

def query( options, args ):
    """Collect the input sequences and forward them to query_request().

    Sequences come either from the tab-delimited files in --files or,
    failing that, from the inline --sequences text (Galaxy encodes
    newlines as "__cn__" and tabs as "__tc__").
    """
    output_dir_path = options.outputdir
    multiple_data = {}
    comma_sep_file_paths = options.files
    # Check if options.files contains at least one file path.
    if comma_sep_file_paths is not None:
        for file_path in comma_sep_file_paths.split(","):
            with open(file_path, 'r') as content_file:
                _collect_sequences(content_file, "\t", multiple_data, output_dir_path, str(options.errorfile))
        if len(multiple_data) > 0:
            return query_request( options, args, multiple_data )
        else:
            return raiseException( ERR_EXIT_CODE, "An error has occurred. Please be sure that your input files are valid.", output_dir_path, str(options.errorfile) )
    else:
        # Fall back to the sequences given inline via --sequences.
        text_content = options.sequences
        if text_content is not None:
            text_content = str(text_content)
            if text_content.strip():
                # "__cn__" separates rows; "__tc__" separates id from sequence.
                _collect_sequences(text_content.strip().split("__cn__"), "__tc__", multiple_data, output_dir_path, str(options.errorfile))
                if len(multiple_data) > 0:
                    return query_request( options, args, multiple_data )
                else:
                    return raiseException( ERR_EXIT_CODE, "An error has occurred. Please be sure that your input files are valid.", output_dir_path, str(options.errorfile) )
        else:
            return raiseException( ERR_EXIT_CODE, "You have to insert at least one row formatted as a tab delimited (ID, SEQUENCE) couple", output_dir_path, str(options.errorfile) )
    return ERR_EXIT_CODE
def __main__():
    """Parse the command-line options and run the query tool."""
    usage = "Usage: query.py --files comma_sep_file_paths --names comma_sep_file_names --sequences sequences_text --search search_mode --exact exact_alg --sthreshold threshold --outputdir output_dir_path"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("-v", "--version", action="store_true", dest="version",
                      default=False, help="display version and exit")
    parser.add_option("-f", "--files", type="string",
                      action="store", dest="files", help="comma separated files path")
    parser.add_option("-n", "--names", type="string",
                      action="store", dest="names", help="comma separated names associated to the files specified in --files")
    parser.add_option("-s", "--sequences", type="string",
                      action="store", dest="sequences", help="contains a list of sequences (one for each row)")
    parser.add_option("-a", "--fasta", type="string",
                      action="store", dest="fasta", help="contains the content of a fasta file")
    parser.add_option("-x", "--search", type="string", default="rrr",
                      action="store", dest="search", help="search mode")
    parser.add_option("-e", "--exact", type="int", default=0,
                      action="store", dest="exact", help="exact algorithm (required if search is 1 only)")
    parser.add_option("-k", "--tree", type="int", default=0,
                      action="store", dest="treeid", help="the id of the tree that will be queried")
    parser.add_option("-t", "--sthreshold", type="float",
                      action="store", dest="sthreshold", help="threshold applied to the search algorithm")
    parser.add_option("-o", "--outputdir", type="string", default="output",
                      action="store", dest="outputdir", help="output directory (collection) path")
    parser.add_option("-r", "--errorfile", type="string", default="error_txt",
                      action="store", dest="errorfile", help="error file name containing error messages")

    (options, args) = parser.parse_args()
    if options.version:
        # Parenthesised form behaves identically under Python 2 and also
        # works under Python 3 (the bare "print x" statement does not).
        print(__version__)
    else:
        # Create the output dir (collection) before any file is written.
        output_dir_path = options.outputdir
        if not os.path.exists(output_dir_path):
            os.makedirs(output_dir_path)
        return query( options, args )

if __name__ == "__main__": __main__()