Mercurial > repos > jjohnson > query_tabular
comparison query_tabular.py @ 0:926c62f7fa09 draft
planemo upload for repository https://github.com/jj-umn/galaxytools/tree/master/query_tabular commit 9ae87502ea7c3da33ecc453872c4eb2f41ecea4a-dirty
author | jjohnson |
---|---|
date | Thu, 21 Jan 2016 08:23:45 -0500 |
parents | |
children | 3e3b3c883bec |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:926c62f7fa09 |
---|---|
1 #!/usr/bin/env python | |
2 """ | |
3 """ | |
4 import sys | |
5 import re | |
6 import os.path | |
7 import json | |
8 import sqlite3 as sqlite | |
9 import optparse | |
10 from optparse import OptionParser | |
11 | |
12 """ | |
13 TODO: | |
14 - could read column names from comment lines, but issues with legal names | |
15 - could add some transformations on tabular columns, | |
16 e.g. a regex to format date/time strings | |
17 c2 : re.sub('pat', 'sub', c2) | |
18 c3 : | |
19 - column_defs dict of columns to create from tabular input | |
20 column_defs : { 'name1' : 'expr', 'name2' : 'expr'} | |
21 - allow multiple queries and outputs | |
22 - add a --json input for table definitions (or yaml) | |
23 JSON config: | |
24 { tables : [ | |
25 { file_path : '/home/galaxy/dataset_101.dat', | |
26 table_name : 't1', | |
27 column_names : ['c1', 'c2', 'c3'], | |
28 comment_lines : 1 | |
29 }, | |
30 { file_path : '/home/galaxy/dataset_102.dat', | |
31 table_name : 't2', | |
32 column_names : ['c1', 'c2', 'c3'] | |
33 }, | |
34 { file_path : '/home/galaxy/dataset_103.dat', | |
35 table_name : 'test', | |
36 column_names : ['c1', 'c2', 'c3'] | |
37 } | |
38 ] | |
39 } | |
40 """ | |
41 | |
# Lists every user-defined table and its CREATE statement, ordered by name.
tables_query = (
    "SELECT name, sql FROM sqlite_master "
    "WHERE type='table' ORDER BY name"
)
44 | |
45 | |
def getValueType(val):
    """Infer the SQLite column type for a single tabular field value.

    Returns 'INTEGER', 'REAL' or 'TEXT' for a non-empty value, or None
    for an empty/missing value so the caller can defer the decision.
    """
    # Empty string/None means "no information"; 0.0 is falsy but is still
    # a real value, hence the explicit equality check.
    if val or 0. == val:
        try:
            int(val)
            return 'INTEGER'
        # Narrowed from a bare except: only conversion failures mean
        # "not this type"; anything else should propagate.
        except (ValueError, TypeError):
            try:
                float(val)
                return 'REAL'
            except (ValueError, TypeError):
                return 'TEXT'
    return None
58 | |
59 | |
def get_column_def(file_path, table_name, skip=0, comment_char='#',
                   column_names=None, max_lines=100):
    """Scan a tabular file and derive SQLite column names and types.

    Reads every data line of *file_path* (skipping the first *skip* lines
    and lines starting with *comment_char*), widening each column's type
    as needed in the order INTEGER -> REAL -> TEXT.

    :param column_names: optional comma-separated names; an empty entry
        keeps the default 'c<N>' name for that position.
    :returns: (col_names, col_types, col_def) where col_def entries are
        '<name> <TYPE>' SQL fragments.

    NOTE(review): *max_lines* and *table_name* are currently unused --
    the whole file is always scanned; confirm before relying on them.
    """
    # Preference order: a lower index is a "wider" type; None = no data yet.
    col_pref = ['TEXT', 'REAL', 'INTEGER', None]
    col_types = []
    data_lines = 0
    try:
        with open(file_path, "r") as fh:
            for linenum, line in enumerate(fh):
                if linenum < skip:
                    continue
                if line.startswith(comment_char):
                    continue
                data_lines += 1
                try:
                    fields = line.split('\t')
                    # Grow the type list if this row has more columns.
                    while len(col_types) < len(fields):
                        col_types.append(None)
                    for i, val in enumerate(fields):
                        colType = getValueType(val)
                        # Only ever widen a column's type, never narrow it.
                        if col_pref.index(colType) < col_pref.index(col_types[i]):
                            col_types[i] = colType
                # Python-3-compatible exception syntax (was: except Exception, e).
                except Exception as e:
                    sys.stderr.write('Failed at line: %d err: %s\n' % (linenum, e))
    except Exception as e:
        sys.stderr.write('Failed: %s\n' % (e))
    # Columns that never saw a typed value default to TEXT.
    for i, col_type in enumerate(col_types):
        if not col_type:
            col_types[i] = 'TEXT'
    col_names = ['c%d' % i for i in range(1, len(col_types) + 1)]
    if column_names:
        for i, cname in enumerate([cn.strip() for cn in column_names.split(',')]):
            if cname and i < len(col_names):
                col_names[i] = cname
    col_def = []
    for i, col_name in enumerate(col_names):
        col_def.append('%s %s' % (col_names[i], col_types[i]))
    return col_names, col_types, col_def
97 | |
98 | |
def create_table(conn, file_path, table_name, skip=0, comment_char='#', column_names=None):
    """Create *table_name* in *conn* and bulk-load it from a tabular file.

    Column names/types are inferred by get_column_def(); empty fields are
    stored as NULL. Bad lines are reported to stderr and skipped; a failure
    to create the table aborts the process with exit status 1.
    """
    col_names, col_types, col_def = get_column_def(file_path, table_name, skip=skip, comment_char=comment_char, column_names=column_names)
    # One converter per column so values are stored with the inferred type.
    col_func = [float if t == 'REAL' else int if t == 'INTEGER' else str for t in col_types]
    table_def = 'CREATE TABLE %s (\n %s\n);' % (table_name, ', \n '.join(col_def))
    insert_stmt = 'INSERT INTO %s(%s) VALUES(%s)' % (table_name, ','.join(col_names), ','.join(["?" for x in col_names]))
    data_lines = 0
    try:
        c = conn.cursor()
        c.execute(table_def)
        with open(file_path, "r") as fh:
            for linenum, line in enumerate(fh):
                if linenum < skip or line.startswith(comment_char):
                    continue
                data_lines += 1
                try:
                    fields = line.rstrip('\r\n').split('\t')
                    # Empty string fields become NULL.
                    vals = [col_func[i](x) if x else None for i, x in enumerate(fields)]
                    c.execute(insert_stmt, vals)
                # Python-3-compatible exception syntax (was: except Exception, e).
                except Exception as e:
                    sys.stderr.write('Failed at line: %d err: %s\n' % (linenum, e))
        conn.commit()
        c.close()
    except Exception as e:
        sys.stderr.write('Failed: %s\n' % (e))
        # sys.exit instead of the site-provided exit() builtin.
        sys.exit(1)
126 | |
127 | |
def regex_match(expr, item):
    """SQLite helper: true when *expr* matches at the start of *item*."""
    return bool(re.match(expr, item))
130 | |
131 | |
def regex_search(expr, item):
    """SQLite helper: true when *expr* matches anywhere in *item*."""
    return bool(re.search(expr, item))
134 | |
135 | |
def regex_sub(expr, replace, item):
    """SQLite helper: *item* with every match of *expr* replaced by *replace*."""
    pattern = re.compile(expr)
    return pattern.sub(replace, item)
138 | |
139 | |
def get_connection(sqlitedb_path, addfunctions=False):
    """Open a SQLite connection to *sqlitedb_path*.

    When *addfunctions* is true, register the SQL functions re_match,
    re_search and re_sub for use in queries on this connection.
    """
    connection = sqlite.connect(sqlitedb_path)
    if not addfunctions:
        return connection
    for sql_name, nargs, fn in (("re_match", 2, regex_match),
                                ("re_search", 2, regex_search),
                                ("re_sub", 3, regex_sub)):
        connection.create_function(sql_name, nargs, fn)
    return connection
147 | |
148 | |
def __main__():
    """CLI driver: load tabular files into a SQLite db and run a query.

    With no query given, dumps the table/column layout to stderr instead.
    NOTE: this file is Python 2 syntax (print >> chevron, 'except X, e').
    """
    # Parse Command Line
    parser = optparse.OptionParser()
    parser.add_option('-s', '--sqlitedb', dest='sqlitedb', default=None, help='The SQLite Database')
    parser.add_option('-t', '--table', dest='tables', action="append", default=[], help='Tabular file: file_path[=table_name[:column_name, ...]')
    parser.add_option('-j', '--jsonfile', dest='jsonfile', default=None, help='Tabular file: file_path[=table_name[:column_name, ...]')
    parser.add_option('-q', '--query', dest='query', default=None, help='SQL query')
    parser.add_option('-Q', '--query_file', dest='query_file', default=None, help='SQL query file')
    # NOTE(review): help text says "Include a column headers line" but the
    # flag actually SUPPRESSES the header line -- confirm and fix the text.
    parser.add_option('-n', '--no_header', dest='no_header', action='store_true', default=False, help='Include a column headers line')
    parser.add_option('-o', '--output', dest='output', default=None, help='Output file for query results')
    (options, args) = parser.parse_args()

    # open sqlite connection
    conn = get_connection(options.sqlitedb)
    # determine output destination
    if options.output is not None:
        try:
            outputPath = os.path.abspath(options.output)
            outputFile = open(outputPath, 'w')
        except Exception, e:
            print >> sys.stderr, "failed: %s" % e
            exit(3)
    else:
        outputFile = sys.stdout

    # get table defs
    # -t form: file_path[=table_name[:col1,col2,...]]; default name t<N>.
    if options.tables:
        for ti, table in enumerate(options.tables):
            table_name = 't%d' % (ti + 1)
            column_names = None
            fields = table.split('=')
            path = fields[0]
            if len(fields) > 1:
                names = fields[1].split(':')
                table_name = names[0] if names[0] else table_name
                if len(names) > 1:
                    column_names = names[1]
            # print >> sys.stdout, '%s %s' % (table_name, path)
            create_table(conn, path, table_name, column_names=column_names)
    # -j form: JSON {'tables': [{'file_path', 'table_name', 'column_names',
    # 'comment_lines'}, ...]} -- see the module docstring example.
    if options.jsonfile:
        try:
            fh = open(options.jsonfile)
            tdef = json.load(fh)
            if 'tables' in tdef:
                for ti, table in enumerate(tdef['tables']):
                    path = table['file_path']
                    table_name = table['table_name'] if 'table_name' in table else 't%d' % (ti + 1)
                    column_names = table['column_names'] if 'column_names' in table else None
                    comment_lines = table['comment_lines'] if 'comment_lines' in table else 0
                    create_table(conn, path, table_name, column_names=column_names, skip=comment_lines)
        except Exception, exc:
            print >> sys.stderr, "Error: %s" % exc
    # Loading is done; the query below opens a fresh connection.
    conn.close()

    query = None
    if (options.query_file is not None):
        with open(options.query_file, 'r') as fh:
            query = ''
            for line in fh:
                query += line
    elif (options.query is not None):
        query = options.query

    # No query: report each table and its columns to stderr, then exit 0.
    if (query is None):
        try:
            conn = get_connection(options.sqlitedb)
            c = conn.cursor()
            rslt = c.execute(tables_query).fetchall()
            for table, sql in rslt:
                print >> sys.stderr, "Table %s:" % table
                try:
                    # LIMIT 0 fetches no rows but still yields cursor.description.
                    col_query = 'SELECT * FROM %s LIMIT 0' % table
                    cur = conn.cursor().execute(col_query)
                    cols = [col[0] for col in cur.description]
                    print >> sys.stderr, " Columns: %s" % cols
                except Exception, exc:
                    print >> sys.stderr, "Error: %s" % exc
        except Exception, exc:
            print >> sys.stderr, "Error: %s" % exc
        exit(0)
    # if not sqlite.is_read_only_query(query):
    #     print >> sys.stderr, "Error: Must be a read only query"
    #     exit(2)
    # Run the query with the re_* SQL functions registered; results are
    # written tab-separated, optionally preceded by a '#'-prefixed header.
    try:
        conn = get_connection(options.sqlitedb, addfunctions=True)
        cur = conn.cursor()
        results = cur.execute(query)
        if not options.no_header:
            outputFile.write("#%s\n" % '\t'.join([str(col[0]) for col in cur.description]))
            # yield [col[0] for col in cur.description]
        for i, row in enumerate(results):
            # yield [val for val in row]
            outputFile.write("%s\n" % '\t'.join([str(val) for val in row]))
    except Exception, exc:
        print >> sys.stderr, "Error: %s" % exc
        exit(1)
245 | |
# Script entry point.
if __name__ == "__main__":
    __main__()