query_tabular.py @ 0:3708ff0198b7 (draft)

planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/tools/query_tabular commit 74915fc9cee746bbce1c4b507e13231259de177d

author:   iuc
date:     Tue, 18 Jul 2017 09:07:07 -0400
parents:  (none)
children: 8a33b442ecd9
#!/usr/bin/env python

from __future__ import print_function

import json
import optparse
import os.path
import sys

from load_db import create_table

from query_db import describe_tables, get_connection, run_query


"""
JSON config:
{ tables : [
    { file_path : '/home/galaxy/dataset_101.dat',
            table_name : 't1',
            column_names : ['c1','c2','c3'],
            pkey_autoincr : 'id'
            comment_lines : 1
            unique: ['c1'],
            index: ['c2', 'c3']
    },
    { file_path : '/home/galaxy/dataset_102.dat',
            table_name : 'gff',
            column_names : ['seqname',,'date','start','end']
            comment_lines : 1
            load_named_columns : True
            filters : [{'filter': 'regex', 'pattern': '#peptide',
                        'action': 'exclude_match'},
                       {'filter': 'replace', 'column': 3,
                        'replace': 'gi[|]', 'pattern': ''}]
    },
    { file_path : '/home/galaxy/dataset_103.dat',
            table_name : 'test',
            column_names : ['c1', 'c2', 'c3']
    }
    ]
}
"""


def __main__():
    # Parse Command Line
    parser = optparse.OptionParser()
    parser.add_option('-s', '--sqlitedb', dest='sqlitedb', default=None,
                      help='The SQLite Database')
    parser.add_option('-j', '--jsonfile', dest='jsonfile', default=None,
                      help='JSON dict of table specifications')
    parser.add_option('-q', '--query', dest='query', default=None,
                      help='SQL query')
    parser.add_option('-Q', '--query_file', dest='query_file', default=None,
                      help='SQL query file')
    parser.add_option('-n', '--no_header', dest='no_header', default=False,
                      action='store_true',
                      help='Omit the column headers line from the output')
    parser.add_option('-o', '--output', dest='output', default=None,
                      help='Output file for query results')
    (options, args) = parser.parse_args()

    # determine output destination
    if options.output is not None:
        try:
            outputPath = os.path.abspath(options.output)
            outputFile = open(outputPath, 'w')
        except Exception as e:
            exit('Error: %s' % (e))
    else:
        outputFile = sys.stdout

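    # Helper used for each entry in the JSON 'tables' list: every key except
    # 'file_path' is optional, and missing keys fall back to the defaults
    # below (e.g. a generated table name 't<N>', no skipped lines, no indexes).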
    def _create_table(ti, table):
        path = table['file_path']
        table_name =\
            table['table_name'] if 'table_name' in table else 't%d' % (ti + 1)
        comment_lines =\
            table['comment_lines'] if 'comment_lines' in table else 0
        comment_char =\
            table['comment_char'] if 'comment_char' in table else None
        column_names =\
            table['column_names'] if 'column_names' in table else None
        if column_names:
            load_named_columns =\
                table['load_named_columns']\
                if 'load_named_columns' in table else False
        else:
            load_named_columns = False
        unique_indexes = table['unique'] if 'unique' in table else []
        indexes = table['index'] if 'index' in table else []
        filters = table['filters'] if 'filters' in table else None
        pkey_autoincr = \
            table['pkey_autoincr'] if 'pkey_autoincr' in table else None
        create_table(get_connection(options.sqlitedb), path, table_name,
                     pkey_autoincr=pkey_autoincr,
                     column_names=column_names,
                     skip=comment_lines,
                     comment_char=comment_char,
                     load_named_columns=load_named_columns,
                     filters=filters,
                     unique_indexes=unique_indexes,
                     indexes=indexes)

    if options.jsonfile:
        try:
            with open(options.jsonfile) as fh:
                tdef = json.load(fh)
                if 'tables' in tdef:
                    for ti, table in enumerate(tdef['tables']):
                        _create_table(ti, table)
        except Exception as e:
            exit('Error: %s' % (e))

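    # Resolve the SQL to run: a query file (-Q) takes precedence over an
    # inline query (-q); if neither is given, the table layouts are described
    # instead of running a query.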
    query = None
    if options.query_file is not None:
        with open(options.query_file, 'r') as fh:
            query = ''
            for line in fh:
                query += line
    elif options.query is not None:
        query = options.query

    if query is None:
        try:
            describe_tables(get_connection(options.sqlitedb), outputFile)
        except Exception as e:
            exit('Error: %s' % (e))
    else:
        try:
            run_query(get_connection(options.sqlitedb), query, outputFile,
                      no_header=options.no_header)
        except Exception as e:
            exit('Error: %s' % (e))


if __name__ == "__main__":
    __main__()