0
|
1 #!/usr/bin/env python
|
|
2
|
|
3 # https://github.com/ross/requests-futures
|
|
4 # http://docs.python-requests.org/en/master/user/quickstart/#more-complicated-post-requests
|
|
5
|
|
6 import sys, os, uuid, optparse, requests, json, time
|
|
7 #from requests_futures.sessions import FuturesSession
|
|
8
|
|
9 #### NN14 ####
|
|
10 SERVICE_URL = "http://nn14.galaxyproject.org:8080/";
|
|
11 #service_url = "http://127.0.0.1:8082/";
|
|
12 QUERY_URL = SERVICE_URL+"tree/1/query";
|
|
13 STATUS_URL = SERVICE_URL+"status/<query_id>";
|
|
14 ##############
|
|
15 # query delay in seconds
|
|
16 QUERY_DELAY = 30;
|
|
17 ##############
|
|
18
|
|
19 __version__ = "1.0.0";
|
|
20 VALID_CHARS = '.-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ '
|
|
21 # in the case of collections, exitcodes equal to 0 and 1 are not considered errors
|
|
22 ERR_EXIT_CODE = 2;
|
|
23 OK_EXIT_CODE = 0;
|
|
24
|
|
def raiseException( exitcode, message, output_dir_path, errorfilename ):
    """Write *message* to an error file in the output collection, then exit.

    The file is named "<errorfilename>_txt" inside *output_dir_path*; the
    process terminates immediately afterwards with *exitcode*.
    """
    error_path = os.path.join(output_dir_path, "{0}_txt".format(errorfilename))
    with open(error_path, 'w') as handle:
        handle.write(message)
    sys.exit(exitcode)
|
30
|
|
def query_request( options, args, payload ):
    """Submit *payload* (id -> sequence dict) to the remote service and poll
    until the query completes.

    On SUCCESS, one tabular file per queried sequence is written into the
    output collection directory and the process exits with OK_EXIT_CODE.
    On FAILURE/REVOKED, or when the submission itself fails, an error file is
    written and the process exits with ERR_EXIT_CODE (via raiseException).
    """
    output_dir_path = options.outputdir
    # add the search parameters to the payload
    #payload["tree_id"] = str(options.treeid);
    payload["search_mode"] = str(options.search)
    payload["exact_algorithm"] = int(options.exact)
    payload["search_threshold"] = float(options.sthreshold)
    # set the content type to application/json
    headers = {'Content-type': 'application/json'}

    # a single session is enough for both the submission and every poll
    # (the original created a brand-new Session on each iteration)
    session = requests.Session()
    # make a synchronous post request to the query route
    req = session.post(QUERY_URL, headers=headers, json=payload)
    if req.status_code != requests.codes.ok:
        return raiseException( ERR_EXIT_CODE, "Unable to query the remote server. Please try again in a while.", output_dir_path, str(options.errorfile) )

    # use Response.text rather than str(Response.content): under Python 3 the
    # latter yields a "b'...'" bytes repr that breaks json.loads
    json_content = json.loads(req.text)
    # retrieve query id
    query_id = json_content['query_id']

    # poll the status route until the query leaves the pending states
    json_status_content = None
    while True:
        status_query_url = STATUS_URL.replace("<query_id>", query_id)
        status_req = session.get(status_query_url)
        json_status_content = json.loads(status_req.text)
        # the 'state' attribute is always available
        state = json_status_content['state']
        if state == 'SUCCESS':
            break
        elif state in ['FAILURE', 'REVOKED']:
            return raiseException( ERR_EXIT_CODE, "Query ID: "+str(query_id)+"\nQuery status: "+str(state), output_dir_path, str(options.errorfile) )
        time.sleep(QUERY_DELAY)  # in seconds

    out_file_format = "tabular"
    for block in json_status_content['results']:
        seq_id = block['sequence_id']
        # put response block in the output collection, one file per sequence
        output_file_path = os.path.join(output_dir_path, seq_id + "_" + out_file_format)
        lines = []
        for hit in block['hits']:
            if type(hit) is dict:  # sabutan
                # str() guards against numeric scores, which would raise a
                # TypeError when concatenated to the accession string
                lines.append(hit['accession_number'] + "\t" + str(hit['score']))
            else:  # all-some
                lines.append(str(hit))
        with open(output_file_path, 'w') as out:
            out.write("\n".join(lines))
    return sys.exit(OK_EXIT_CODE)
|
93
|
|
def _collect_sequence( multiple_data, line, separator ):
    """Parse one "<id><separator><sequence>" line into *multiple_data*.

    The id is cleaned so it contains VALID_CHARS only. Returns the cleaned id
    when it is already present in *multiple_data* (a duplicate), otherwise
    None; blank or malformed lines are silently skipped.
    """
    line = line.strip()
    if line == "":
        return None
    line_split = line.split(separator)
    if len(line_split) != 2:  # 0:id , 1:seq , otherwise skip line
        return None
    # fix seq_id using valid chars only
    seq_id = ''.join(e for e in line_split[0] if e in VALID_CHARS)
    if seq_id in multiple_data:
        return seq_id
    multiple_data[seq_id] = line_split[1]
    return None

def query( options, args ):
    """Build the id -> sequence payload from --files (tab-separated rows) or
    --sequences (Galaxy-escaped text: "__cn__" = newline, "__tc__" = tab)
    and hand it to query_request.

    Exits through raiseException on duplicate ids or empty/invalid input.
    """
    output_dir_path = options.outputdir
    multiple_data = {}
    comma_sep_file_paths = options.files
    # check if options.files contains at least one file path
    if comma_sep_file_paths is not None:
        # split file paths
        file_paths = comma_sep_file_paths.split(",")
        for file_path in file_paths:
            with open(file_path, 'r') as content_file:
                for line in content_file:
                    duplicated = _collect_sequence( multiple_data, line, "\t" )
                    if duplicated is not None:
                        return raiseException( ERR_EXIT_CODE, "Error: the id '"+duplicated+"' is duplicated", output_dir_path, str(options.errorfile) )
        if len(multiple_data) > 0:
            return query_request( options, args, multiple_data )
        else:
            return raiseException( ERR_EXIT_CODE, "An error has occurred. Please be sure that your input files are valid.", output_dir_path, str(options.errorfile) )
    else:
        # try with the sequence in --sequence
        text_content = options.sequences
        # check if options.sequences contains a list of sequences (one for each row)
        if text_content is not None:
            text_content = str(text_content)
            if text_content.strip():
                # "__cn__" is Galaxy's encoding of a newline, "__tc__" of a tab
                for line in text_content.strip().split("__cn__"):
                    duplicated = _collect_sequence( multiple_data, line, "__tc__" )
                    if duplicated is not None:
                        return raiseException( ERR_EXIT_CODE, "Error: the id '"+duplicated+"' is duplicated", output_dir_path, str(options.errorfile) )
                if len(multiple_data) > 0:
                    return query_request( options, args, multiple_data )
                else:
                    return raiseException( ERR_EXIT_CODE, "An error has occurred. Please be sure that your input files are valid.", output_dir_path, str(options.errorfile) )
            else:
                return raiseException( ERR_EXIT_CODE, "You have to insert at least one row formatted as a tab delimited (ID, SEQUENCE) couple", output_dir_path, str(options.errorfile) )
    # neither --files nor --sequences was supplied
    return ERR_EXIT_CODE
|
155
|
|
def __main__():
    """Parse the command line, prepare the output collection directory and
    run the query against the remote service (or just print the version)."""
    usage = "Usage: query.py --files comma_sep_file_paths --names comma_seq_file_names --sequences sequences_text --search search_mode --exact exact_alg --sthreshold threshold --outputdir output_dir_path"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("-v", "--version", action="store_true", dest="version",
                      default=False, help="display version and exit")
    parser.add_option("-f", "--files", type="string",
                      action="store", dest="files", help="comma separated files path")
    parser.add_option("-n", "--names", type="string",
                      action="store", dest="names", help="comma separated names associated to the files specified in --files")
    parser.add_option("-s", "--sequences", type="string",
                      action="store", dest="sequences", help="contains a list of sequences (one for each row)")
    parser.add_option("-a", "--fasta", type="string",
                      action="store", dest="fasta", help="contains the content of a fasta file")
    parser.add_option("-x", "--search", type="string", default=0,
                      action="store", dest="search", help="search mode")
    parser.add_option("-e", "--exact", type="int", default=0,
                      action="store", dest="exact", help="exact algorithm (required if search is 1 only)")
    parser.add_option("-t", "--sthreshold", type="float",
                      action="store", dest="sthreshold", help="threshold applied to the search algorithm")
    parser.add_option("-o", "--outputdir", type="string", default="output",
                      action="store", dest="outputdir", help="output directory (collection) path")
    parser.add_option("-r", "--errorfile", type="string", default="error.log",
                      action="store", dest="errorfile", help="error file name containing error messages")

    (options, args) = parser.parse_args()
    if options.version:
        # print() is valid in both Python 2 and 3; the original used the
        # Python-2-only print statement
        print(__version__)
    else:
        # create output dir (collection) if it does not exist yet
        output_dir_path = options.outputdir
        if not os.path.exists(output_dir_path):
            os.makedirs(output_dir_path)
        return query( options, args )
|
195
|
|
# Run the tool only when executed as a script, not when imported.
if __name__ == "__main__":
    __main__()
|