from mirbase_functions import *
from mirbase_graphs import *
import time
import itertools  # used below for groupby-based deduplication
import copy       # used below for deepcopy of the count matrices
from multiprocessing import Process, Queue, Lock, Pool, Manager, Value
import subprocess
import argparse
import sys

subprocess.call(['mkdir','-p', 'split1','split2','split3','split4','Counts','Diff/temp_con','Diff/temp_tre','Diff/n_temp_con','Diff/n_temp_tre'])

parser = argparse.ArgumentParser()
parser.add_argument("-analysis", "--anal", help="type of analysis: 1 = templated isoforms only, 2 = templated and non-templated isoforms", action="store")
parser.add_argument("-con", "--control", help="input fastq file (controls)", nargs='+', default=[])
parser.add_argument("-tre", "--treated", help="input fastq file (treated)", nargs='+', default=[])
parser.add_argument("-tool_dir", "--tool_directory", help="tool directory path", action="store")
parser.add_argument("-gen", "--org_name", help="Organism", action="store")
parser.add_argument("-f", "--flag", help="choose the database (MirBase, MirGene)", action="store")
parser.add_argument("-percentage", "--per", help="Percentage of Samples", action="store")
parser.add_argument("-counts", "--count", help="Counts for filtering", action="store")
parser.add_argument("-name1", "--group1", help="Samples group 1", action="store")
parser.add_argument("-name2", "--group2", help="Samples group 2", action="store")
args = parser.parse_args()

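# Example invocation (illustrative sketch only: the script name, file paths, organism and
# group labels below are placeholders, and the exact -con/-tre token format is inferred
# from the argument handling in the main block, not from documented usage):
#
#   python mirna_isoform_pipeline.py -analysis 1 \
#       -con "[c1,Controls /data/c1.sam," "c2,Controls /data/c2.sam]" \
#       -tre "[t1,Treated /data/t1.sam," "t2,Treated /data/t2.sam]" \
#       -tool_dir /path/to/tool -gen hsa -f MirBase \
#       -percentage 50 -counts 10 -name1 Controls -name2 Treated
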
#################################################################################################################################################################################################################

if __name__ == '__main__':

    starttime = time.time()

    lock = Lock()
    manager = Manager()

    # Download the reference miRNA sequences from miRBase
    mature_mirnas = manager.list()
    ps_mature = Process(target=download_matures,args=(mature_mirnas,args.org_name))
    ps_mature.start()

    # Keep the names of the files and their location paths
    args.control[0] = args.control[0][1:]
    args.control[len(args.control)-1][:-1]  # no effect (result not assigned); the trailing character is stripped later via path[1][:-1]
    control = [(args.control[i:i+2]) for i in range(0, len(args.control), 2)]

    args.treated[0] = args.treated[0][1:]
    args.treated[len(args.treated)-1][:-1]  # no effect; see the note above
    treated = [(args.treated[i:i+2]) for i in range(0, len(args.treated), 2)]
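    # (Format note, inferred from the slicing above rather than documented usage: -con and
    # -tre appear to receive whitespace-separated "label,group" / file-path token pairs
    # wrapped in brackets; the leading "[" is removed here and the trailing character of
    # each path is removed later, when the paths are handed to the worker processes.)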

    ############## Detection of templated isoforms ################

    # Initialization of the managers shared between the processes
    # First group of samples (controls)
    con_samples = manager.list()      # collapsed miRNAs with the new names
    con_data = manager.list()         # keeps all necessary data for the database
    con_file_order = manager.list()   # files' names ordered by processes
    con_names_seqs = manager.list()   # keeps only the miRNA names and sequences
    deseq = manager.list()
    con_unmap_seq = manager.Value('i',0)     # keeps the number of unmapped unique sequences for the generation of a graph
    con_unmap_counts = manager.Value('i',0)  # keeps the counts of unmapped sequences for the generation of a graph
    con_mirna_names = manager.list()  # keeps the names of the miRNAs
    ini_con_samples = manager.list()  # filtered SAM files

    # Second group of samples (treated)
    tre_samples = manager.list()
    tre_data = manager.list()
    tre_file_order = manager.list()
    tre_names_seqs = manager.list()
    deseq1 = manager.list()
    tre_unmap_seq = manager.Value('i',0)
    tre_unmap_counts = manager.Value('i',0)
    tre_mirna_names = manager.list()
    ini_tre_samples = manager.list()

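    # The manager.list()/manager.Value() objects above are proxies served by the Manager
    # process, so the worker Processes launched below can append their results to shared
    # containers; `lock` is passed along to serialize those shared writes where needed.
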
    # Wait for the download of reference miRNA sequences
    ps_mature.join()
    mature_mirnas = list(mature_mirnas)

    # Processing of the detected miRNAs from SAM files
    ps_sam = [Process(target=sam_edit,args=(mature_mirnas,path[1][:-1],path[0].split(",")[0],"c",lock,con_samples,con_data,con_file_order,con_unmap_seq,con_names_seqs,deseq,con_mirna_names,ini_con_samples,con_unmap_counts)) for path in control]
    ps_sam.extend([Process(target=sam_edit,args=(mature_mirnas,path[1][:-1],path[0].split(",")[0],"t",lock,tre_samples,tre_data,tre_file_order,tre_unmap_seq,tre_names_seqs,deseq1,tre_mirna_names,ini_tre_samples,tre_unmap_counts)) for path in treated])

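    # Each sam_edit worker receives the reference miRNAs, one SAM file path (path[1] minus
    # its trailing character), the sample label (path[0] up to the first comma), a "c"/"t"
    # group flag, and the shared containers it fills in. (This is a reading of the calls
    # above; sam_edit itself is defined in mirbase_functions.)
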
    # Wait for processing of SAM files to finish
    [p.start() for p in ps_sam]
    [p.join() for p in ps_sam]

    # Generate a histogram
    ps_hist = [Process(target=hist_red,args=(ini_con_samples,'c',args.group1))]
    ps_hist.extend([Process(target=hist_red,args=(ini_tre_samples,'t',args.group2))])
    [x.start() for x in ps_hist]

    # Convert managers to lists
    con_samples = list(con_samples)
    tre_samples = list(tre_samples)
    con_file_order = list(con_file_order)
    tre_file_order = list(tre_file_order)
    deseq = list(deseq)
    deseq1 = list(deseq1)

    # Remove duplicates and sort
    con_names_seqs = list(con_names_seqs)
    con_names_seqs.sort()
    con_names_seqs = list(con_names_seqs for con_names_seqs,_ in itertools.groupby(con_names_seqs))

    tre_names_seqs = list(tre_names_seqs)
    tre_names_seqs.sort()
    tre_names_seqs = list(tre_names_seqs for tre_names_seqs,_ in itertools.groupby(tre_names_seqs))

    # Initialization of new managers
    new_con_file_order = manager.list()
    new_tre_file_order = manager.list()
    new_deseq = manager.list()
    new_deseq1 = manager.list()

    # Add the miRNAs that were not detected in a given sample (uncommon miRNAs),
    # so that every sample ends up with the same set of entries
    ps_un_mirnas = [Process(target=uncommon_mirnas,args=(sampp,con_names_seqs,lock,new_deseq,con_file_order[i],new_con_file_order)) for i,sampp in enumerate(deseq)]
    ps_un_mirnas.extend([Process(target=uncommon_mirnas,args=(sampp,tre_names_seqs,lock,new_deseq1,tre_file_order[i],new_tre_file_order)) for i,sampp in enumerate(deseq1)])

    # Wait for the processing of the uncommon miRNAs to finish
    [z.start() for z in ps_un_mirnas]
    [z.join() for z in ps_un_mirnas]

    # Convert managers to lists
    new_deseq = list(new_deseq)
    new_deseq1 = list(new_deseq1)
    con_file_order = list(new_con_file_order)
    tre_file_order = list(new_tre_file_order)

    # Generation of the count matrices per group (controls - treated)
    control_group = [[x[0],x[2]] for x in new_deseq[0]]
    [control_group[i].append(y[i][1]) for i,_ in enumerate(control_group) for y in new_deseq]

    treated_group = [[x[0],x[2]] for x in new_deseq1[0]]
    [treated_group[i].append(y[i][1]) for i,_ in enumerate(treated_group) for y in new_deseq1]

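    # Each row of the matrices above appears to be [miRNA name, sequence, count_sample1,
    # count_sample2, ...]: name and sequence come from the first sample and one count
    # column is appended per sample, which relies on the samples being index-aligned by
    # the uncommon-miRNA step above.
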
    # Keep a copy of the count matrices
    control_group_copy = copy.deepcopy(list(control_group))
    treated_group_copy = copy.deepcopy(list(treated_group))

    # Initialization of managers
    merg_nam_control_group = manager.list()
    merg_nam_treated_group = manager.list()

    # Merge the different names used for the same miRNA sequence within each group
    # (controls, treated) to avoid duplicates
    ps_merge = [Process(target=merging_names,args=(control_group_copy,merg_nam_control_group))]
    ps_merge.extend([Process(target=merging_names,args=(treated_group_copy,merg_nam_treated_group))])
    [x.start() for x in ps_merge]

    # Add the miRNA sequences that are unique to one group, so that both groups contain the same set of sequences
    con_list = manager.list()
    tre_list = manager.list()

    ps_bw = [Process(target=black_white,args=(con_names_seqs,tre_names_seqs,treated_group,tre_list))]
    ps_bw.extend([Process(target=black_white,args=(tre_names_seqs,con_names_seqs,control_group,con_list))])
    [x.start() for x in ps_bw]
    [x.join() for x in ps_bw]

    control_group = list(con_list)
    treated_group = list(tre_list)

    # Detection of duplications
    dupes = manager.list()

    ps_dupes = Process(target=merging_dupes,args=(control_group,dupes))
    ps_dupes.start()
    ps_dupes.join()

    dupes = list(dupes)

    # Merge the duplicated entries into a single entry carrying all the different names
    con_list = manager.list()
    tre_list = manager.list()

    ps_ap_merg_dupes = [Process(target=apply_merging_dupes,args=(control_group,dupes,con_list))]
    ps_ap_merg_dupes.extend([Process(target=apply_merging_dupes,args=(treated_group,dupes,tre_list))])
    [x.start() for x in ps_ap_merg_dupes]

    # Preparation of the reference sequences (isoforms) for the detection of non-templated miRNAs
    if args.anal=="2":
        all_iso = manager.list()
        ps_non_iso = Process(target=non_template_ref,args=(con_samples,tre_samples,all_iso))
        ps_non_iso.start()

    # Wait for the name-merging processes to finish
    [x.join() for x in ps_merge]
    merg_nam_control_group = list(merg_nam_control_group)
    merg_nam_treated_group = list(merg_nam_treated_group)

    # Export the database and the graphs
    procs = [Process(target=DB_write,args=(x[0],x[1],x[2],x[3],1)) for x in con_data]
    procs.extend([Process(target=DB_write,args=(x[0],x[1],x[2],x[3],1)) for x in tre_data])
    procs.extend([Process(target=make_spider,args=(merg_nam_control_group,merg_nam_treated_group,args.group1,args.group2))])

    if args.anal == "1":
        procs.extend([Process(target=pie_temp,args=(merg_nam_control_group,con_unmap_seq.value,con_unmap_counts.value,merg_nam_treated_group,tre_unmap_seq.value,tre_unmap_counts.value,args.group1,args.group2))])

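    # The spider plot and the per-sample DB_write exports run for both analysis types; for
    # analysis type "1" the unmapped-read totals additionally feed the templated-isoform
    # pie chart. (Summary of the process lists above; the plots themselves are implemented
    # in mirbase_graphs.)
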
    [p.start() for p in procs]

    # Export the pdf report file
    if args.anal=="1":
        [x.join() for x in ps_hist]
        [p.join() for p in procs]
        ps_pdf = Process(target=pdf_before_DE,args=(args.anal,args.group1,args.group2))
        ps_pdf.start()

    [x.join() for x in ps_ap_merg_dupes]
    control_group = list(con_list)
    treated_group = list(tre_list)

    # Filter low-count miRNAs (optional)
    if int(args.per)!=-1:
        if int(args.per)>0 and int(args.per)<=100 and int(args.count)>0:

            fil_con_group = manager.list()
            fil_tre_group = manager.list()

            ps_low_counts = Process(target=filter_low_counts,args=(control_group,treated_group,fil_con_group,fil_tre_group,args.per,args.count))
            ps_low_counts.start()
            ps_low_counts.join()

            fil_con_group = list(fil_con_group)
            fil_tre_group = list(fil_tre_group)
        else:
            sys.exit("Unacceptable values for the filtering parameters (-percentage, -counts)")

    # If filtering was skipped, fall back to the unfiltered count matrices
    if "fil_con_group" not in locals() or "fil_con_group" not in globals():
        fil_con_group = control_group
        fil_tre_group = treated_group

    # Export the count matrices
    ps_write = Process(target=write_main,args=(control_group, treated_group, fil_con_group, fil_tre_group, con_file_order,tre_file_order,1,args.group1,args.group2,args.per))
    ps_write.start()

    # Export count files compatible with DESeq2 and edgeR
    ps1_matrix = [Process(target=temp_counts_to_diff,args=(con_file_order,fil_con_group,"Diff/temp_con/"))]
    ps1_matrix.extend([Process(target=temp_counts_to_diff,args=(tre_file_order,fil_tre_group,"Diff/temp_tre/"))])
    [p.start() for p in ps1_matrix]

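    # temp_counts_to_diff presumably writes one count file per sample into Diff/temp_con/
    # and Diff/temp_tre/ (directories created at the top of the script), the layout expected
    # by the downstream differential-expression (DESeq2/edgeR) step.
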
    if args.anal=="1":
        ps_pdf.join()
    if args.anal=="2":
        [p.join() for p in procs]
        [x.join() for x in ps_hist]

    ps_write.join()
    [p.join() for p in ps1_matrix]

    ############################## Detection of non-templated isoforms #######################################

    if args.anal == "2":

        # Initialization of the managers shared between the processes
        # First group of samples (controls)
        n_con_data = manager.list()
        n_con_file_order = manager.list()
        n_con_names_seqs = manager.list()
        n_deseq = manager.list()

        # Second group of samples (treated)
        n_tre_data = manager.list()
        n_tre_file_order = manager.list()
        n_tre_names_seqs = manager.list()
        n_deseq1 = manager.list()

        # Preparation of the reference sequences
        new_ref_mirnas = list(mature_mirnas)
        ps_non_iso.join()

        all_iso = list(all_iso)
        new_ref_mirnas.extend(all_iso)

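        # The reference for this second pass is the mature miRNA set extended with the
        # templated isoforms collected by non_template_ref, so reads are re-checked against
        # both before being treated as non-templated. (Interpretation of the surrounding
        # code; non_template_ref is defined in mirbase_functions.)
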
        # Processing of the non-templated miRNAs from the SAM files
        ps_sam = [Process(target=non_sam_edit,args=(new_ref_mirnas,path[1][:-1],path[0].split(",")[0],"c",lock,n_con_data,n_con_file_order,n_deseq,n_con_names_seqs)) for path in control]
        ps_sam.extend([Process(target=non_sam_edit,args=(new_ref_mirnas,path[1][:-1],path[0].split(",")[0],"t",lock,n_tre_data,n_tre_file_order,n_deseq1,n_tre_names_seqs)) for path in treated])

        [p.start() for p in ps_sam]
        [p.join() for p in ps_sam]

        # Convert managers to lists
        n_con_file_order = list(n_con_file_order)
        n_tre_file_order = list(n_tre_file_order)
        n_deseq = list(n_deseq)
        n_deseq1 = list(n_deseq1)

        # Remove duplicates and sort
        n_con_names_seqs = list(n_con_names_seqs)
        n_con_names_seqs.sort()
        n_con_names_seqs = list(n_con_names_seqs for n_con_names_seqs,_ in itertools.groupby(n_con_names_seqs))

        n_tre_names_seqs = list(n_tre_names_seqs)
        n_tre_names_seqs.sort()
        n_tre_names_seqs = list(n_tre_names_seqs for n_tre_names_seqs,_ in itertools.groupby(n_tre_names_seqs))

        # Initialization of new managers
        new_n_con_file_order = manager.list()
        new_n_tre_file_order = manager.list()
        n_new_deseq = manager.list()
        n_new_deseq1 = manager.list()

        # Add the miRNAs that were not detected in a given sample (uncommon miRNAs)
        ps_deseq = [Process(target=uncommon_mirnas,args=(sampp,n_con_names_seqs,lock,n_new_deseq,n_con_file_order[i],new_n_con_file_order)) for i,sampp in enumerate(n_deseq)]
        ps_deseq.extend([Process(target=uncommon_mirnas,args=(sampp,n_tre_names_seqs,lock,n_new_deseq1,n_tre_file_order[i],new_n_tre_file_order)) for i,sampp in enumerate(n_deseq1)])

        # Wait for the processing of the uncommon miRNAs to finish
        [x.start() for x in ps_deseq]
        [x.join() for x in ps_deseq]

        # Convert managers to lists
        n_new_deseq = list(n_new_deseq)
        n_new_deseq1 = list(n_new_deseq1)
        n_con_file_order = list(new_n_con_file_order)
        n_tre_file_order = list(new_n_tre_file_order)

        # Generation of the count matrices per group (controls - treated)
        n_control_group = [[x[0],x[2]] for x in n_new_deseq[0]]
        [n_control_group[i].append(y[i][1]) for i,_ in enumerate(n_control_group) for y in n_new_deseq]

        n_treated_group = [[x[0],x[2]] for x in n_new_deseq1[0]]
        [n_treated_group[i].append(y[i][1]) for i,_ in enumerate(n_treated_group) for y in n_new_deseq1]

        # Keep a copy of the count matrices
        n_control_group_copy = copy.deepcopy(list(n_control_group))
        n_treated_group_copy = copy.deepcopy(list(n_treated_group))

        # Initialization of managers
        merg_nam_n_control_group = manager.list()
        merg_nam_n_treated_group = manager.list()

        # Merge the different names used for the same miRNA sequence within each group
        # (controls, treated) to avoid duplicates
        ps_merge = [Process(target=merging_names,args=(n_control_group_copy,merg_nam_n_control_group))]
        ps_merge.extend([Process(target=merging_names,args=(n_treated_group_copy,merg_nam_n_treated_group))])
        [x.start() for x in ps_merge]

        # Add the miRNA sequences that are unique to one group, so that both groups contain the same set of sequences
        n_con_list = manager.list()
        n_tre_list = manager.list()

        ps_bw = [Process(target=black_white,args=(n_con_names_seqs,n_tre_names_seqs,n_treated_group,n_tre_list))]
        ps_bw.extend([Process(target=black_white,args=(n_tre_names_seqs,n_con_names_seqs,n_control_group,n_con_list))])
        [x.start() for x in ps_bw]
        [x.join() for x in ps_bw]

        n_control_group = list(n_con_list)
        n_treated_group = list(n_tre_list)

        # Detection of duplications
        n_dupes = manager.list()

        ps_dupes = Process(target=merging_dupes,args=(n_control_group,n_dupes))
        ps_dupes.start()
        ps_dupes.join()

        n_dupes = list(n_dupes)

        # Merge the duplicated entries into a single entry carrying all the different names
        n_con_list = manager.list()
        n_tre_list = manager.list()

        ps_ap_merg_dupes = [Process(target=apply_merging_dupes,args=(n_control_group,n_dupes,n_con_list))]
        ps_ap_merg_dupes.extend([Process(target=apply_merging_dupes,args=(n_treated_group,n_dupes,n_tre_list))])
        [x.start() for x in ps_ap_merg_dupes]

        # Wait for the name-merging processes to finish
        [x.join() for x in ps_merge]
        merg_nam_n_control_group = list(merg_nam_n_control_group)
        merg_nam_n_treated_group = list(merg_nam_n_treated_group)

        # Export the database and the graphs
        procs = [Process(target=DB_write,args=(x[0],x[1],x[2],x[3],2)) for x in n_con_data]
        procs.extend([Process(target=DB_write,args=(x[0],x[1],x[2],x[3],2)) for x in n_tre_data])
        procs.extend([Process(target=logo_seq_red,args=(merg_nam_n_control_group,'c',args.group1))])
        procs.extend([Process(target=logo_seq_red,args=(merg_nam_n_treated_group,'t',args.group2))])
        procs.extend([Process(target=pie_non_temp,args=(merg_nam_control_group,merg_nam_n_control_group,merg_nam_treated_group,merg_nam_n_treated_group,con_unmap_seq.value,tre_unmap_seq.value,con_unmap_counts.value,tre_unmap_counts.value,args.group1,args.group2))])

        [p.start() for p in procs]
        [p.join() for p in procs]

        # Export the pdf report file
        procs1 = Process(target=pdf_before_DE,args=(args.anal,args.group1,args.group2))
        procs1.start()

        [x.join() for x in ps_ap_merg_dupes]
        n_control_group = list(n_con_list)
        n_treated_group = list(n_tre_list)

        # Filter low-count miRNAs (optional)
        if int(args.per)!=-1:
            if int(args.per)>0 and int(args.per)<=100 and int(args.count)>0:

                n_fil_con_group = manager.list()
                n_fil_tre_group = manager.list()

                ps_low_counts = Process(target=filter_low_counts,args=(n_control_group,n_treated_group,n_fil_con_group,n_fil_tre_group,args.per,args.count))
                ps_low_counts.start()
                ps_low_counts.join()

                n_fil_con_group = list(n_fil_con_group)
                n_fil_tre_group = list(n_fil_tre_group)

            else:
                sys.exit("Unacceptable values for the filtering parameters (-percentage, -counts)")

        # If filtering was skipped, fall back to the unfiltered count matrices
        if "n_fil_con_group" not in locals() or "n_fil_con_group" not in globals():
            n_fil_con_group = n_control_group
            n_fil_tre_group = n_treated_group

        # Export the count matrices
        ps_write = Process(target=write_main,args=(n_control_group, n_treated_group,n_fil_con_group, n_fil_tre_group, n_con_file_order, n_tre_file_order,2,args.group1,args.group2,args.per))
        ps_write.start()

        # Export count files compatible with DESeq2 and edgeR
        ps1_matrix = [Process(target=nontemp_counts_to_diff,args=(n_con_file_order,n_fil_con_group,con_file_order,fil_con_group,"Diff/n_temp_con/"))]
        ps1_matrix.extend([Process(target=nontemp_counts_to_diff,args=(n_tre_file_order,n_fil_tre_group,tre_file_order,fil_tre_group,"Diff/n_temp_tre/"))])
        [p.start() for p in ps1_matrix]

        ps_write.join()
        [p.join() for p in ps1_matrix]
        procs1.join()

    print('Running time: {} seconds'.format(round(time.time() - starttime,2)))