# HG changeset patch
# User Radek Brich
# Date 1329239756 -3600
# Node ID 8636719a30f6b28559ab3f6f6dd3366a34551f90
# Parent  27fc0504663d246334321845d5b0d27b8b49a136
Add batchquery tool.

diff -r 27fc0504663d -r 8636719a30f6 batchquery.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/batchquery.py	Tue Feb 14 18:15:56 2012 +0100
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3.2
+
+from pgtoolkit import toolbase
+from pgtoolkit.highlight import highlight
+
+
+class BatchQueryTool(toolbase.SimpleTool):
+    def __init__(self):
+        toolbase.SimpleTool.__init__(self, name='batchquery', desc='Run a query using columns from CSV file as arguments.')
+        self.parser.add_argument('--query', dest='query', type=str, help='File with the query to run. Use %s for arguments, or %(name)s for named arguments (see --header).')
+        self.parser.add_argument('--file', dest='file', type=str, help='CSV file with data to use as arguments.')
+        self.parser.add_argument('--init', dest='init', type=str, help='File with a query which initializes database session (eg. temporary function).')
+        self.parser.add_argument('--output', dest='output', type=str, help='File name for results.')
+        self.parser.add_argument('--header', dest='header', action='store_true', help='First line of CSV is header with names for columns. These names can be used in the query.')
+        self.init()
+
+    def _split_line(self, line):
+        return [x.strip() for x in line.split(',')]
+
+    def main(self):
+        results = []
+        # load query from file
+        with open(self.args.query, 'r', encoding='utf8') as f:
+            query = f.read()
+        # connect DB
+        with self.pgm.cursor('target') as curs:
+            # run init query
+            if self.args.init:
+                with open(self.args.init, 'r', encoding='utf8') as f:
+                    curs.execute(f.read(), [])
+            # read CSV file
+            with open(self.args.file, 'r', encoding='utf8') as f:
+                # read header
+                names = None
+                if self.args.header:
+                    line = f.readline()
+                    names = self._split_line(line)
+                # read and process lines
+                for line in f:
+                    args = self._split_line(line)
+                    if names:
+                        args = dict(zip(names, args))
+                    curs.execute(query, args)
+                    rows = curs.fetchall()
+                    results.append((args, rows))
+                curs.connection.commit()
+        # write results to output file
+        if self.args.output:
+            with open(self.args.output, 'w', encoding='utf8') as f:
+                for args, rows in results:
+                    f.write(repr(args))
+                    f.write(' -> ')
+                    f.write(repr(rows))
+                    f.write('\n')
+
+
+tool = BatchQueryTool()
+tool.main()
+
diff -r 27fc0504663d -r 8636719a30f6 pgtoolkit/toolbase.py
--- a/pgtoolkit/toolbase.py	Tue Feb 07 11:32:07 2012 +0100
+++ b/pgtoolkit/toolbase.py	Tue Feb 14 18:15:56 2012 +0100
@@ -43,8 +43,9 @@
         handler = logging.StreamHandler()
         handler.setFormatter(format)
         handler.setLevel(logging.DEBUG)
-        logger = logging.getLogger('pgmanager')
+        logger = logging.getLogger('pgmanager_sql')
         logger.addHandler(handler)
+        logger.setLevel(logging.DEBUG)
 
     def prepare_conn_from_metadb(self, name, lookup_name):
         '''Create connection in pgmanager using meta DB.