#!/usr/bin/env python

# https://github.com/ross/requests-futures
# http://docs.python-requests.org/en/master/user/quickstart/#more-complicated-post-requests

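"""
Query a remote NN14 service: submit one or more (ID, sequence) pairs to the
query route, poll the status route until the query has been processed, and
write one list of accession numbers per input sequence into the output
directory (collection). Errors are written to the error file and signalled
through the exit code.
"""
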
import sys, os, uuid, optparse, requests, json, time
#from requests_futures.sessions import FuturesSession

#### NN14 ####
SERVICE_URL = "http://nn14.galaxyproject.org:8080/";
#service_url = "http://127.0.0.1:8082/";
QUERY_URL = SERVICE_URL+"tree/0/query";
STATUS_URL = SERVICE_URL+"status/<query_id>";
##############
# query delay in seconds
QUERY_DELAY = 30;
##############

__version__ = "1.0.0";
VALID_CHARS = '.-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ '
# in the case of collections, exitcodes equal to 0 and 1 are not considered errors
ERR_EXIT_CODE = 2;
OK_EXIT_CODE = 0;

# write the error message into a file inside the output collection, then exit with the given code
def raiseException( exitcode, message, output_dir_path, errorfilename ):
    errorfilepath = os.path.join(output_dir_path, errorfilename+"_txt");
    with open(errorfilepath, 'w') as out:
        out.write(message);
    sys.exit(exitcode);

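# Interaction with the remote service, as implemented in query_request() below:
#   1. POST the payload (sequences plus search_mode, exact_algorithm and
#      search_threshold) as JSON to QUERY_URL and read back a 'query_id'.
#   2. GET STATUS_URL (with <query_id> substituted) every QUERY_DELAY seconds
#      until 'state' becomes SUCCESS, or fail on FAILURE/REVOKED.
#   3. On SUCCESS, write one tabular file per 'sequence_id' in 'results',
#      each containing that sequence's 'accession_numbers'.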
def query_request( options, args, payload ):
    output_dir_path = options.outputdir;
    # add additional parameters to the payload
    #payload["tree_id"] = str(options.treeid);
    payload["search_mode"] = str(options.search);
    payload["exact_algorithm"] = int(options.exact);
    payload["search_threshold"] = float(options.sthreshold);
    # set the content type to application/json
    headers = {'Content-type': 'application/json'};

    # create a session
    session = requests.Session();
    # make a synchronous post request to the query route
    req = session.post(QUERY_URL, headers=headers, json=payload);
    resp_code = req.status_code;
    #print(str(req.content)+"\n\n");
    if resp_code == requests.codes.ok:
        resp_content = req.text;
        # convert the response to json
        json_content = json.loads(resp_content);
        # retrieve the query id
        query_id = json_content['query_id'];
        query_processed = False;
        # results json content
        json_status_content = None;
        while query_processed is False:
            # create a new session
            session = requests.Session();
            # make a synchronous get request to the status route
            status_query_url = STATUS_URL.replace("<query_id>", str(query_id));
            status_req = session.get(status_query_url);
            status_resp_content = status_req.text;
            #print(status_resp_content+"\n\n");
            # convert the response to json
            json_status_content = json.loads(status_resp_content);
            # take a look at the state
            # the state attribute is always available
            if json_status_content['state'] == 'SUCCESS':
                query_processed = True;
                break;
            elif json_status_content['state'] in ['FAILURE', 'REVOKED']:
                return raiseException( ERR_EXIT_CODE, "Query ID: "+str(query_id)+"\nQuery status: "+str(json_status_content['state']), output_dir_path, str(options.errorfile) );
            else:
                time.sleep(QUERY_DELAY); # in seconds

        out_file_format = "tabular";
        for block in json_status_content['results']:
            seq_id = block['sequence_id'];
            accessions = block['accession_numbers'];
            # put the response block in the output collection
            output_file_path = os.path.join(output_dir_path, seq_id + "_" + out_file_format);
            accessions_list = "";
            for accession_number in accessions:
                accessions_list = accessions_list + accession_number + "\n";
            with open(output_file_path, 'w') as out:
                out.write(accessions_list.strip());
        return sys.exit(OK_EXIT_CODE);
    else:
        return raiseException( ERR_EXIT_CODE, "Unable to query the remote server. Please try again in a while.", output_dir_path, str(options.errorfile) );

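# Build the payload for query_request() from one of two sources:
#   --files      comma-separated paths to tab-delimited (ID, SEQUENCE) files;
#   --sequences  raw text in which Galaxy encodes new lines as '__cn__' and
#                tabs as '__tc__'.
# Sequence IDs are filtered against VALID_CHARS and must be unique.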
def query( options, args ):
    output_dir_path = options.outputdir;
    multiple_data = {};
    comma_sep_file_paths = options.files;
    #print("files: "+str(comma_sep_file_paths)+" - "+str(type(comma_sep_file_paths)));
    # check if options.files contains at least one file path
    if comma_sep_file_paths is not None:
        # split file paths
        file_paths = comma_sep_file_paths.split(",");
        # split file names
        comma_sep_file_names = str(options.names);
        #print("names: "+str(comma_sep_file_names));
        file_names = comma_sep_file_names.split(",");
        for idx, file_path in enumerate(file_paths):
            #file_name = file_names[idx];
            with open(file_path, 'r') as content_file:
                for line in content_file:
                    if line.strip() != "":
                        line_split = line.strip().split("\t"); # split on tab
                        if len(line_split) == 2: # 0:id , 1:seq , otherwise skip line
                            seq_id = line_split[0];
                            # fix seq_id using valid chars only
                            seq_id = ''.join(e for e in seq_id if e in VALID_CHARS)
                            seq_text = line_split[1];
                            if seq_id in multiple_data:
                                return raiseException( ERR_EXIT_CODE, "Error: the id '"+seq_id+"' is duplicated", output_dir_path, str(options.errorfile) );
                            multiple_data[seq_id] = seq_text;
        if len(multiple_data) > 0:
            return query_request( options, args, multiple_data );
            #return echo( options, args );
        else:
            return raiseException( ERR_EXIT_CODE, "An error has occurred. Please be sure that your input files are valid.", output_dir_path, str(options.errorfile) );
    else:
        # try with the sequences in --sequences
        text_content = options.sequences;
        #print("sequences: "+text_content);
        # check if options.sequences contains a list of sequences (one for each row)
        if text_content is not None:
            text_content = str(text_content);
            if text_content.strip():
                # populate a dictionary with the sequences to query
                text_content = text_content.strip().split("__cn__"); # split on Galaxy-encoded new line
                for line in text_content:
                    if line.strip() != "":
                        line_split = line.strip().split("__tc__"); # split on Galaxy-encoded tab
                        if len(line_split) == 2: # 0:id , 1:seq , otherwise skip line
                            seq_id = line_split[0];
                            # fix seq_id using valid chars only
                            seq_id = ''.join(e for e in seq_id if e in VALID_CHARS)
                            seq_text = line_split[1];
                            if seq_id in multiple_data:
                                return raiseException( ERR_EXIT_CODE, "Error: the id '"+seq_id+"' is duplicated", output_dir_path, str(options.errorfile) );
                            multiple_data[seq_id] = seq_text;
                if len(multiple_data) > 0:
                    return query_request( options, args, multiple_data );
                    #return echo( options, args );
                else:
                    return raiseException( ERR_EXIT_CODE, "An error has occurred. Please be sure that your input files are valid.", output_dir_path, str(options.errorfile) );
            else:
                return raiseException( ERR_EXIT_CODE, "You have to insert at least one row formatted as a tab-delimited (ID, SEQUENCE) pair", output_dir_path, str(options.errorfile) );
    return ERR_EXIT_CODE;

def __main__():
    # Parse the command line options
    usage = "Usage: query.py --files comma_sep_file_paths --names comma_sep_file_names --sequences sequences_text --search search_mode --exact exact_alg --sthreshold threshold --outputdir output_dir_path";
    parser = optparse.OptionParser(usage = usage);
    parser.add_option("-v", "--version", action="store_true", dest="version",
                      default=False, help="display version and exit")
    parser.add_option("-f", "--files", type="string",
                      action="store", dest="files", help="comma separated files path");
    parser.add_option("-n", "--names", type="string",
                      action="store", dest="names", help="comma separated names associated to the files specified in --files");
    parser.add_option("-s", "--sequences", type="string",
                      action="store", dest="sequences", help="contains a list of sequences (one for each row)");
    parser.add_option("-a", "--fasta", type="string",
                      action="store", dest="fasta", help="contains the content of a fasta file");
    parser.add_option("-x", "--search", type="string", default=0,
                      action="store", dest="search", help="search mode");
    parser.add_option("-e", "--exact", type="int", default=0,
                      action="store", dest="exact", help="exact algorithm (required only if search is 1)");
    parser.add_option("-t", "--sthreshold", type="float",
                      action="store", dest="sthreshold", help="threshold applied to the search algorithm");
    parser.add_option("-o", "--outputdir", type="string", default="output",
                      action="store", dest="outputdir", help="output directory (collection) path");
    parser.add_option("-r", "--errorfile", type="string", default="error.log",
                      action="store", dest="errorfile", help="error file name containing error messages");

    # TEST
    #sequences = 'NM_001169378.2__tc__atttcggatgctttggagggaggaactctagtgctgcattgattggggcgtgtgttaatgatattcccagttcgcatggcgagcatcgattcctggtacgtatgtgggccccttgactcccacttatcgcacttgtcgttcgcaatttgcatgaattccgcttcgtctgaaacgcacttgcgccagacttctccggctggtctgatctggtctgtgatccggtctggtggggcgccagttgcgtttcgagctcatcaccagtcactccgcagtcgcattctgccagaggtctccgatcaagagcgcttctccattcgagattcaaacgcagcgcggtctgacgccgccacatcgagtgaaatccatatcgatggccacattcacacaggacgagatcgacttcctgcgcagccatggcaacgagctgtgtgccaagacctggctgggattgtgggatccgaagcgggctgtgcaccagcaggagcagcgcgaactgatgatggacaagtatgagcggaagcgatactacctggagccggccagtcctcttaagtcgctggccaatgcggtcaacctgaagtcgtctgctccggcgacgaaccacactcagaatggccaccaaaatgggtatgccagcatccatttgacgcctcctgctgcccagcggacctcggccaatggattgcagaaggtggccaactcgtcgagtaactcttctggaaagacctcatcctcgatcagtaggccacactataatcaccagaacaacagccaaaacaacaatcacgatgcctttggcctgggtggcggattgagcagcctgaacagcgccggttccacatccactggagctctttccgacaccagcagttgtgctagcaatggcttcggtgcggactgcgactttgtggctgactttggctcggccaacattttcgacgccacatcggcgcgttccacaggatcgccggcggtgtcgtccgtgtcctcagtgggttccagcaatggctacgccaaggtgcagcccatccgggcagctcatctccagcagcaacagcagttgcagcagcagctgcatcagcagcagctcctcaatggcaatggtcatcagggcactgagaactttgccgacttcgatcacgctcccatctacaatgcagtggctccaccgacttttaacgattggatcagcgactggagcaggcggggcttccacgatcccttcgacgattgcgatgactcgccaccaggtgcccgccctccagcacctgcgccagctcctgctcaagttcccgcagtatcatcaccattgccaaccgtccgagaagaaccagagcttgcgtggaatttttgggaggacgagatgcgaatagaggcgcaggaaaaggagtcccaaactaaacagccggagttgggctactccttttcgattagtactactacgcccctttccccttcgaatcccttcctgccctaccttgtcagtgaggagcagcatcgaaatcatccagagaagccctccttttcgtattcgttgttcagctccatatcaaatagttcgcaagaagatcaggcggatgatcatgagatgaatgttttaaatgccaatttccatgatttctttacgtggagtgctcccttgcagaacggccatacgaccagtccgcccaagggcggaaatgcagcgatggcgcccagtgaggatcgatatgccgctcttaaggatctcgacgagcagctgcgagaactgaaggccagcgaaagcgccacagagacgcccacgcccaccagtggcaatgttcaggccacagatgcctttggtggagccctcaacaacaatccaaatcccttcaagggccagcaacagcagcagctcagcagccatgtggtgaatccattccagcagcagcaacagcagcagcaccagcagaatctctatggccagttgacgctcataccaaatgcctacggcagcagttcccagcagcagatggggcaccatctcctccagcagcagcagcagcaacagcagagcttcttcaacttcaacaacaacgggttcgccatctcgcagggtctgcccaacggctgcggcttcggcagcatgcaacccgctcctgtgatggccaacaatccctttgcagccagcggcgccatgaacaccaacaatccattcttatgagactcaacccgggagaatccgcctcgcgccacctggcagaggcgctgagccagcgaacaaagagcagacgcggaggaaccgaaccgaaattagtccattttactaacaatagcgttaatctatgtatacataatgcacgccggagagcactctttgtgtacatagcccaaatatgtacacccgaaaggctccacgctgacgctagtcctcgcggatggcggaggcggactggggcgttgatatattcttttacatggtaactctactctaacgtttacggatacggatatttgtatttgccgtttgccctagaactctatacttgtactaagcgcccatgaacacttcatccactaacatagctactaatcctcatcctagtggaggatgcagttggtccagacactctgttatttgttttatccatcctcgtacttgtctttgtcccatttagcactttcgttgcggataagaactttgtcagttattgattgtgtggccttaataagattataaaactaaatattataacgtacgactatacatatacggatacagatacagattcagacacagttagtacagatacagatatacatatacgcttttgtacctaatgaattgcttcttgtttccattgctaatcatctgcttttcgtgtgctaattttatacactagtacgtgcgatatcggccgtgcagatagattgctcagctcgcgagtcaagcctcttttggttgcacccacggcagacatttgtacatatactgtctgattgtaagcctcgtgtaatacctccattaacaccactcccccaccacccatccatcgaaccccgaatccatgactcaattcactgctcacatgtccatgcccatgccttaacgtgtcaaacattatcgaagccttaaagttatttaaaactacgaaatttcaataaaaacaaataagaacgctatc';
    #(options, args) = parser.parse_args(['-x', 'rrr', '-t', 0.5, '-s', sequences, '-o', 'collection_content']);
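    # Example invocation (illustrative values only, not defaults):
    #   python query.py --files /path/to/queries.tsv --names queries.tsv \
    #                   --search 1 --exact 0 --sthreshold 0.5 \
    #                   --outputdir collection_content --errorfile error.log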

    (options, args) = parser.parse_args();
    if options.version:
        print(__version__);
    else:
        # create the output dir (collection)
        output_dir_path = options.outputdir;
        if not os.path.exists(output_dir_path):
            os.makedirs(output_dir_path);

        return query( options, args );

if __name__ == "__main__": __main__()