raise ImportError('This module is deprecated. Use gmPG2.py.')

8 """Broker for PostgreSQL distributed backend connections.
9
10 @copyright: author
11
12 TODO: iterator/generator batch fetching:
13 - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad
14 - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4"
15
16 winner:
17 def resultset_functional_batchgenerator(cursor, size=100):
18 for results in iter(lambda: cursor.fetchmany(size), []):
19 for rec in results:
20 yield rec
21 """

__version__ = "$Revision: 1.90 $"
__author__ = "H.Herb <hherb@gnumed.net>, I.Haywood <i.haywood@ugrad.unimelb.edu.au>, K.Hilbert <Karsten.Hilbert@gmx.net>"
__license__ = 'GPL (details at http://www.gnu.org)'

print "gmPG phased out, please replace with gmPG2"

import sys
sys.exit()

# NB: the original import block was removed when this module was phased out;
# the remaining code still expects time, locale, string, copy, gmLog (as _log),
# gmExceptions and a DB-API 2.0 module bound to the name <dbapi>
# (historically the pyPgSQL adapter).

_query_logging_verbosity = 1

assert(float(dbapi.apilevel) >= 2.0)
assert(dbapi.threadsafety > 0)
assert(dbapi.paramstyle == 'pyformat')

_listener_api = None

_default_client_encoding = {'wire': None, 'string': None}

# time.timezone/time.altzone are given in seconds west of UTC;
# convert to a signed hour offset for "set time zone"
if time.daylight:
    tz = time.altzone
else:
    tz = time.timezone
_default_client_timezone = "%+.1f" % (-tz / 3600.0)

# error message fragment used below to detect serialization failures
_serialize_failure = "serialize access due to concurrent update"

QTablePrimaryKeyIndex = """
SELECT
    indkey
FROM
    pg_index
WHERE
    indrelid =
        (SELECT oid FROM pg_class WHERE relname = '%s');
"""

query_pkey_name = """
SELECT
    pga.attname
FROM
    (pg_attribute pga inner join pg_index pgi on (pga.attrelid=pgi.indrelid))
WHERE
    pga.attnum=pgi.indkey[0]
        and
    pgi.indisprimary is true
        and
    pga.attrelid=(SELECT oid FROM pg_class WHERE relname = %s)"""

query_fkey_names = """
select tgargs from pg_trigger where
    tgname like 'RI%%'
        and
    tgrelid = (
        select oid from pg_class where relname=%s
    )
"""

query_table_col_defs = """select
    cols.column_name,
    cols.udt_name
from
    information_schema.columns cols
where
    cols.table_schema = %s
        and
    cols.table_name = %s
order by
    cols.ordinal_position"""

query_table_attributes = """select
    cols.column_name
from
    information_schema.columns cols
where
    cols.table_schema = %s
        and
    cols.table_name = %s
order by
    cols.ordinal_position"""

query_child_tables = """
select
    pgn.nspname as namespace,
    pgc.relname as table
from
    pg_namespace pgn,
    pg_class pgc
where
    pgc.relnamespace = pgn.oid
        and
    pgc.oid in (
        select inhrelid from pg_inherits where inhparent = (
            select oid from pg_class where
                relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
                relname = %(table)s
        )
    )"""

last_ro_cursor_desc = None


class ConnectionPool:
    "maintains a static dictionary of available database connections"

    # read-only connections, keyed by service name
    __ro_conns = {}
    # maps service names to database IDs
    __service2db_map = {}
    # connection use count per service
    __conn_use_count = {}
    __is_connected = None
    # backend listener threads, keyed by database ID
    __listeners = {}
    # login information shared by all pool instances
    __login = None

    def __init__(self, login=None, encoding=None):

    def GetConnection(self, service="default", readonly=1, encoding=None, extra_verbose=None):

    def get_connection_for_user(self, user=None, password=None, service="default", encoding=None, extra_verbose=None):
        """Get a connection for a given user.

        This will return a connection just as GetConnection() would,
        except that the user to be used for authentication can be
        specified. All the other parameters are going to be the
        same, IOW it will connect to the same server, port and database
        as any other connection obtained through this broker.

        You will have to specify the password, of course, if it
        is needed for PostgreSQL authentication.

        This will always return a read-write connection.
        """
        if user is None:
            _log.Log(gmLog.lErr, 'user must be given')
            raise ValueError, 'gmPG.py::%s.get_connection_for_user(): user name must be given' % self.__class__.__name__

        logininfo = self.GetLoginInfoFor(service)
        logininfo.SetUser(user=user)
        logininfo.SetPassword(passwd=password)

        _log.Log(gmLog.lData, "requesting RW connection to service [%s]" % service)
        conn = self.__pgconnect(logininfo, readonly = 0, encoding = encoding)
        if conn is None:
            return None

        if extra_verbose:
            conn.conn.toggleShowQuery

        return conn

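    # Usage sketch (illustration only; <the_user> and <the_password> are
    # placeholders, not part of this module):
    #
    #   pool = ConnectionPool()
    #   conn = pool.get_connection_for_user(user=the_user, password=the_password, service='default')
    #   if conn is None:
    #       ...handle the connection failure...
    #   curs = conn.cursor()
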
    def Listen(self, service, signal, callback):
        """Listen to 'signal' from the backend in an asynchronous thread.

        If 'signal' is received from database 'service', activate
        the 'callback' function."""

        # lazily load the backend listener implementation
        if _listener_api is None:
            if not _import_listener_engine():
                _log.Log(gmLog.lErr, 'cannot load backend listener code')
                return None

        # map the service name to its database ID
        try:
            backend = ConnectionPool.__service2db_map[service]
        except KeyError:
            backend = 0
        _log.Log(gmLog.lData, "connecting notification [%s] from service [%s] (id %s) with callback %s" % (signal, service, backend, callback))

        # start a listener thread for this database if there is none yet
        if backend not in ConnectionPool.__listeners.keys():
            auth = self.GetLoginInfoFor(service)
            listener = _listener_api.BackendListener(
                service,
                auth.GetDatabase(),
                auth.GetUser(),
                auth.GetPassword(),
                auth.GetHost(),
                int(auth.GetPort())
            )
            ConnectionPool.__listeners[backend] = listener

        listener = ConnectionPool.__listeners[backend]
        listener.register_callback(signal, callback)
        return 1

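    # Usage sketch (illustration only; <on_backend_signal> is a placeholder
    # for an application-side callback):
    #
    #   def on_backend_signal(**kwds):
    #       print "backend signal received:", kwds
    #
    #   pool = ConnectionPool()
    #   pool.Listen('default', 'test', on_backend_signal)
    #   ...
    #   pool.Unlisten('default', 'test', on_backend_signal)
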
    def Unlisten(self, service, signal, callback):

        try:
            backend = self.__service2db_map[service]
        except KeyError:
            _log.Log(gmLog.lWarn, 'cannot stop listener on backend')
            return None
        try:
            ConnectionPool.__listeners[backend].stop_thread()
            del ConnectionPool.__listeners[backend]
        except:
            _log.LogException('cannot stop listener on backend [%s]' % backend, sys.exc_info(), verbose = 0)
            return None
        return 1


    def GetAvailableServices(self):
        """list all distributed services available on this system
        (according to the configuration database)"""
        return ConnectionPool.__ro_conns.keys()

    def GetLoginInfoFor(self, service, login = None):
        """return login information for a particular service"""
        if login is None:
            dblogin = ConnectionPool.__login
        else:
            dblogin = copy.deepcopy(login)

        # the default service (ID 0) simply uses the default login parameters
        try:
            srvc_id = ConnectionPool.__service2db_map[service]
        except KeyError:
            return dblogin

        if srvc_id == 0:
            return dblogin

        # look up the connection parameters for this service in the config database
        cfg_db = ConnectionPool.__ro_conns['default']
        cursor = cfg_db.cursor()
        cmd = "select name, host, port from cfg.db where pk=%s"
        if not run_query(cursor, None, cmd, srvc_id):
            _log.Log(gmLog.lPanic, 'cannot get login info for service [%s] with id [%s] from config database' % (service, srvc_id))
            _log.Log(gmLog.lPanic, 'make sure your service-to-database mappings are properly configured')
            _log.Log(gmLog.lWarn, 'trying to make do with default login parameters')
            return dblogin
        auth_data = cursor.fetchone()
        idx = get_col_indices(cursor)
        cursor.close()

        # override the defaults with whatever is configured for this service
        try:
            dblogin.SetDatabase(string.strip(auth_data[idx['name']]))
        except: pass
        try:
            dblogin.SetHost(string.strip(auth_data[idx['host']]))
        except: pass
        try:
            dblogin.SetPort(auth_data[idx['port']])
        except: pass

        return dblogin



        """Initialize connections to all servers."""
        if login is None and ConnectionPool.__is_connected is None:
            try:
                login = request_login_params()
            except:
                _log.LogException("Exception: Cannot connect to databases without login information !", sys.exc_info(), verbose=1)
                raise gmExceptions.ConnectionError("Can't connect to database without login information!")

        _log.Log(gmLog.lData, login.GetInfoStr())
        ConnectionPool.__login = login

        # connect to the configuration database
        cfg_db = self.__pgconnect(login, readonly=1, encoding=encoding)
        if cfg_db is None:
            raise gmExceptions.ConnectionError, _('Cannot connect to configuration database with:\n\n[%s]') % login.GetInfoStr()

        # this is the default (config) connection
        ConnectionPool.__ro_conns['default'] = cfg_db
        cursor = cfg_db.cursor()

        cursor.execute("select version()")
        _log.Log(gmLog.lInfo, 'service [default/config] running on [%s]' % cursor.fetchone()[0])

        # load all known service names, initially mapped to the config database
        cmd = "select name from cfg.distributed_db"
        if not run_query(cursor, None, cmd):
            cursor.close()
            raise gmExceptions.ConnectionError("cannot load service names from configuration database")
        services = cursor.fetchall()
        for service in services:
            ConnectionPool.__service2db_map[service[0]] = 0

        # establish connections to all databases configured
        # in the user's profile
        cmd = "select * from cfg.config where profile=%s"
        if not run_query(cursor, None, cmd, login.GetProfile()):
            cursor.close()
            raise gmExceptions.ConnectionError("cannot load user profile [%s] from database" % login.GetProfile())
        databases = cursor.fetchall()
        dbidx = get_col_indices(cursor)

        for db in databases:
            # determine the service name for this database
            cursor.execute("select name from cfg.distributed_db where pk=%d" % db[dbidx['ddb']])
            service = string.strip(cursor.fetchone()[0])

            _log.Log(gmLog.lData, "mapping service [%s] to DB ID [%s]" % (service, db[dbidx['db']]))
            ConnectionPool.__service2db_map[service] = db[dbidx['db']]

            ConnectionPool.__conn_use_count[service] = 0
            dblogin = self.GetLoginInfoFor(service, login)

            conn = self.__pgconnect(dblogin, readonly=1, encoding=encoding)
            if conn is None:
                raise gmExceptions.ConnectionError, _('Cannot connect to database with:\n\n[%s]') % login.GetInfoStr()
            ConnectionPool.__ro_conns[service] = conn

            cursor.execute("select version()")
            _log.Log(gmLog.lInfo, 'service [%s] running on [%s]' % (service, cursor.fetchone()[0]))
        cursor.close()
        ConnectionPool.__is_connected = 1
        return ConnectionPool.__is_connected

    def __pgconnect(self, login, readonly=1, encoding=None):
        """Connect to a PostgreSQL backend as specified by the login object.

        - returns a connection object
        - encoding works like this:
          - encoding specified in the call to __pgconnect() overrides
          - encoding set by a call to gmPG.set_default_encoding() overrides
          - encoding taken from the Python string encoding of the active locale
        - wire_encoding and string_encoding must essentially just be different
          names for one and the same (IOW entirely compatible) encoding, such
          as "win1250" and "cp1250"
        """
        dsn = ""
        hostport = ""
        dsn = login.GetDBAPI_DSN()
        hostport = "0"

        if encoding is None:
            encoding = _default_client_encoding

        # encoding used to convert Python unicode strings
        # before sending them to the backend
        string_encoding = encoding['string']
        if string_encoding is None:
            string_encoding = _default_client_encoding['string']
        if string_encoding is None:
            # fall back to the string encoding of the active locale
            string_encoding = locale.getlocale()[1]
            _log.Log(gmLog.lWarn, 'client encoding not specified, this may lead to data corruption in some cases')
            _log.Log(gmLog.lWarn, 'therefore the string encoding currently set in the active locale is used: [%s]' % string_encoding)
            _log.Log(gmLog.lWarn, 'for this to have any chance to work the application MUST have called locale.setlocale() before')
        _log.Log(gmLog.lInfo, 'using string encoding [%s] to encode Unicode strings for transmission to the database' % string_encoding)

        # encoding the backend assumes for strings on the wire
        # (PostgreSQL client_encoding)
        wire_encoding = encoding['wire']
        if wire_encoding is None:
            wire_encoding = _default_client_encoding['wire']
        if wire_encoding is None:
            wire_encoding = string_encoding
        if wire_encoding is None:
            raise ValueError, '<wire_encoding> cannot be None'

        try:
            # DSN-based connect, unicode results decoded with <string_encoding>
            conn = dbapi.connect(dsn=dsn, client_encoding=(string_encoding, 'strict'), unicode_results=1)
        except StandardError:
            _log.LogException("database connection failed: DSN = [%s], host:port = [%s]" % (dsn, hostport), sys.exc_info(), verbose = 1)
            return None

        # set up connection characteristics
        curs = conn.cursor()

        # - client encoding on the wire
        cmd = "set client_encoding to '%s'" % wire_encoding
        try:
            curs.execute(cmd)
        except:
            curs.close()
            conn.close()
            _log.Log(gmLog.lErr, 'query [%s]' % cmd)
            _log.LogException (
                'cannot set string-on-the-wire client_encoding on connection to [%s], this would likely lead to data corruption' % wire_encoding,
                sys.exc_info(),
                verbose = _query_logging_verbosity
            )
            raise
        _log.Log(gmLog.lData, 'string-on-the-wire client_encoding set to [%s]' % wire_encoding)

        # - client time zone
        cmd = "set time zone '%s'" % _default_client_timezone
        if not run_query(curs, None, cmd):
            _log.Log(gmLog.lErr, 'cannot set client time zone to [%s]' % _default_client_timezone)
            _log.Log(gmLog.lWarn, 'not setting this will lead to incorrect dates/times')
        else:
            _log.Log(gmLog.lData, 'time zone set to [%s]' % _default_client_timezone)

        # - date style
        cmd = "set datestyle to 'ISO'"
        if not run_query(curs, None, cmd):
            _log.Log(gmLog.lErr, 'cannot set client date style to ISO')
            _log.Log(gmLog.lWarn, 'you had better use other means to make your server deliver valid ISO timestamps with time zone')

        # - transaction isolation level
        if readonly:
            isolation_level = 'READ COMMITTED'
        else:
            isolation_level = 'SERIALIZABLE'
        cmd = 'set session characteristics as transaction isolation level %s' % isolation_level
        if not run_query(curs, None, cmd):
            curs.close()
            conn.close()
            _log.Log(gmLog.lErr, 'cannot set connection characteristics to [%s]' % isolation_level)
            return None

        # - access mode
        if readonly:
            access_mode = 'READ ONLY'
        else:
            access_mode = 'READ WRITE'
        _log.Log(gmLog.lData, "setting session to [%s] for %s@%s:%s" % (access_mode, login.GetUser(), login.GetHost(), login.GetDatabase()))
        cmd = 'set session characteristics as transaction %s' % access_mode
        if not run_query(curs, 0, cmd):
            _log.Log(gmLog.lErr, 'cannot set connection characteristics to [%s]' % access_mode)
            curs.close()
            conn.close()
            return None

        conn.commit()
        curs.close()
        return conn

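    # Encoding resolution sketch (illustration only): an encoding passed into
    # GetConnection()/__pgconnect() overrides the module-wide default, which in
    # turn overrides the fallback derived from the active locale. A caller
    # wanting win1250 on the wire while encoding Python unicode strings as
    # cp1250 would do something like:
    #
    #   pool = ConnectionPool()
    #   conn = pool.GetConnection('default', readonly=0, encoding={'wire': 'win1250', 'string': 'cp1250'})
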



def fieldNames(cursor):
    "returns the attribute names of the fetched rows in natural sequence as a list"
    names = []
    for d in cursor.description:
        names.append(d[0])
    return names

def run_query(aCursor=None, verbosity=None, aQuery=None, *args):
    # sanity checks
    if aCursor is None:
        _log.Log(gmLog.lErr, 'need cursor to run query')
        return None
    if aQuery is None:
        _log.Log(gmLog.lErr, 'need query to run it')
        return None
    if verbosity is None:
        verbosity = _query_logging_verbosity

    try:
        aCursor.execute(aQuery, *args)
    except:
        _log.LogException("query >>>%s<<< with args >>>%s<<< failed" % (aQuery, args), sys.exc_info(), verbose = verbosity)
        return None

    return 1

def run_commit2(link_obj=None, queries=None, end_tx=False, max_tries=1, extra_verbose=False, get_col_idx=False):
    """Convenience function for running a transaction
    that is supposed to get committed.

    <link_obj>
        can be either:
        - a cursor
        - a connection
        - a service name

    <queries>
        is a list of (query, [args]) tuples to be
        executed as a single transaction, the last
        query may usefully return rows (such as a
        "select currval('some_sequence')" statement)

    <end_tx>
        - controls whether the transaction is finalized (eg.
          committed/rolled back) or not, this allows the
          call to run_commit2() to be part of a framing
          transaction
        - if <link_obj> is a service name the transaction is
          always finalized regardless of what <end_tx> says
        - if <link_obj> is a connection then <end_tx> will
          default to False unless it is explicitly set to
          True, which is taken to mean "yes, you do have full
          control over the transaction", in which case the
          transaction is properly finalized

    <max_tries>
        - controls the number of times a transaction is retried
          after a concurrency error
        - note that *all* <queries> are rerun if a concurrency
          error occurs
        - max_tries is honored if and only if <link_obj> is a service
          name such that we have full control over the transaction

    <get_col_idx>
        - if True, the returned data will include a dictionary
          mapping field names to column positions
        - if False, the returned data includes an empty dict

    method result (see the usage sketch following this function):
        - returns a tuple (status, data)
        - <status>:
          * True - if all queries succeeded (also if there were 0 queries)
          * False - if *any* error occurred
        - <data> if <status> is True:
          * (None, {}) if the last query did not return rows
          * ("fetchall() result", <index>) if the last query returned any rows
          * for <index> see <get_col_idx>
        - <data> if <status> is False:
          * a tuple (error, message) where <error> can be:
          * 1: unspecified error
          * 2: concurrency error
          * 3: constraint violation (non-primary key)
          * 4: access violation
    """
    # sanity checks
    if queries is None:
        return (False, (1, 'forgot to pass in queries'))
    if len(queries) == 0:
        return (True, 'no queries to execute')

    # dispatch on the type of <link_obj>:
    # - a cursor ?
    if hasattr(link_obj, 'fetchone') and hasattr(link_obj, 'description'):
        return __commit2cursor(cursor=link_obj, queries=queries, extra_verbose=extra_verbose, get_col_idx=get_col_idx)
    # - a connection ?
    if (hasattr(link_obj, 'commit') and hasattr(link_obj, 'cursor')):
        return __commit2conn(conn=link_obj, queries=queries, end_tx=end_tx, extra_verbose=extra_verbose, get_col_idx=get_col_idx)
    # - must be a service name, then
    return __commit2service(service=link_obj, queries=queries, max_tries=max_tries, extra_verbose=extra_verbose, get_col_idx=get_col_idx)

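# Usage sketch for run_commit2() (illustration only; the service, table and
# column names below are made up and not part of GNUmed's schema):
#
#   queries = [
#       ("insert into some_table (fk_patient, narrative) values (%s, %s)", [pk_patient, 'some text']),
#       ("select currval('some_table_pk_seq')", [])
#   ]
#   status, data = run_commit2(link_obj='some_service', queries=queries)
#   if not status:
#       err_code, err_msg = data
#       ...handle the error...
#   else:
#       rows, idx = data
#       new_pk = rows[0][0]
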
def __commit2service(service=None, queries=None, max_tries=1, extra_verbose=False, get_col_idx=False):
    # sanity check/clamp max_tries
    try: int(max_tries)
    except ValueError: max_tries = 1
    if max_tries > 4:
        max_tries = 4
    if max_tries < 1:
        max_tries = 1

    pool = ConnectionPool()
    conn = pool.GetConnection(str(service), readonly = 0)
    if conn is None:
        msg = 'cannot connect to service [%s]'
        _log.Log(gmLog.lErr, msg % service)
        return (False, (1, _(msg) % service))
    if extra_verbose:
        conn.conn.toggleShowQuery
    curs = conn.cursor()
    for attempt in range(0, max_tries):
        if extra_verbose:
            _log.Log(gmLog.lData, 'attempt %s' % attempt)
        # run the queries
        for query, args in queries:
            if extra_verbose:
                t1 = time.time()
            try:
                curs.execute(query, *args)
            # query failed
            except:
                if extra_verbose:
                    duration = time.time() - t1
                    _log.Log(gmLog.lData, 'query took %3.3f seconds' % duration)
                conn.rollback()
                exc_info = sys.exc_info()
                typ, val, tb = exc_info
                # concurrency conflict ?
                if str(val).find(_serialize_failure) > 0:
                    _log.Log(gmLog.lData, 'concurrency conflict detected, cannot serialize access due to concurrent update')
                    if attempt < max_tries:
                        # wait a little and try again
                        time.sleep(0.1)
                        continue
                    curs.close()
                    conn.close()
                    return (False, (2, 'l'))
                # other error
                _log.Log(gmLog.lErr, 'query: %s' % query[:2048])
                try:
                    _log.Log(gmLog.lErr, 'argument: %s' % str(args)[:2048])
                except MemoryError:
                    pass
                _log.LogException("query failed on link [%s]" % service, exc_info)
                if extra_verbose:
                    __log_PG_settings(curs)
                curs.close()
                conn.close()
                tmp = str(val).replace('ERROR:', '')
                tmp = tmp.replace('ExecAppend:', '')
                tmp = tmp.strip()
                return (False, (1, _('SQL: %s') % tmp))
            # query succeeded
            if extra_verbose:
                duration = time.time() - t1
                _log.Log(gmLog.lData, 'query: %s' % query[:2048])
                try:
                    _log.Log(gmLog.lData, 'args : %s' % str(args)[:2048])
                except MemoryError:
                    pass
                _log.Log(gmLog.lData, 'query succeeded on link [%s]' % service)
                _log.Log(gmLog.lData, '%s rows affected/returned in %3.3f seconds' % (curs.rowcount, duration))
        # all queries succeeded, no further attempts needed
        break

    # done with queries
    data = None
    idx = {}
    # the last query may return rows
    try:
        data = curs.fetchall()
    except:
        if extra_verbose:
            _log.Log(gmLog.lData, 'fetchall(): last query did not return rows')
        # a non-None description suggests there were rows after all
        if curs.description is not None:
            _log.Log(gmLog.lData, 'there seem to be rows but fetchall() failed -- DB API violation ?')
            _log.Log(gmLog.lData, 'rowcount: %s, description: %s' % (curs.rowcount, curs.description))
    conn.commit()
    if get_col_idx:
        idx = get_col_indices(curs)
    curs.close()
    conn.close()
    return (True, (data, idx))

def __commit2conn(conn=None, queries=None, end_tx=False, extra_verbose=False, get_col_idx=False):
    if extra_verbose:
        conn.conn.toggleShowQuery

    curs = conn.cursor()

    # run the queries
    for query, args in queries:
        if extra_verbose:
            t1 = time.time()
        try:
            curs.execute(query, *args)
        except:
            if extra_verbose:
                duration = time.time() - t1
                _log.Log(gmLog.lData, 'query took %3.3f seconds' % duration)
            conn.rollback()
            exc_info = sys.exc_info()
            typ, val, tb = exc_info
            # concurrency conflict ?
            if str(val).find(_serialize_failure) > 0:
                _log.Log(gmLog.lData, 'concurrency conflict detected, cannot serialize access due to concurrent update')
                curs.close()
                if extra_verbose:
                    conn.conn.toggleShowQuery
                return (False, (2, 'l'))
            # other error
            _log.Log(gmLog.lErr, 'query: %s' % query[:2048])
            try:
                _log.Log(gmLog.lErr, 'args : %s' % str(args)[:2048])
            except MemoryError:
                pass
            _log.LogException("query failed on link [%s]" % conn, exc_info)
            if extra_verbose:
                __log_PG_settings(curs)
            curs.close()
            tmp = str(val).replace('ERROR:', '')
            tmp = tmp.replace('ExecAppend:', '')
            tmp = tmp.strip()
            if extra_verbose:
                conn.conn.toggleShowQuery
            return (False, (1, _('SQL: %s') % tmp))
        # query succeeded
        if extra_verbose:
            duration = time.time() - t1
            _log.Log(gmLog.lData, 'query: %s' % query[:2048])
            try:
                _log.Log(gmLog.lData, 'args : %s' % str(args)[:2048])
            except MemoryError:
                pass
            _log.Log(gmLog.lData, 'query succeeded on link [%s]' % conn)
            _log.Log(gmLog.lData, '%s rows affected/returned in %3.3f seconds' % (curs.rowcount, duration))

    if extra_verbose:
        conn.conn.toggleShowQuery

    # done with queries
    data = None
    idx = {}
    # the last query may return rows
    try:
        data = curs.fetchall()
    except:
        if extra_verbose:
            _log.Log(gmLog.lData, 'fetchall(): last query did not return rows')
        # a non-None description suggests there were rows after all
        if curs.description is not None:
            _log.Log(gmLog.lData, 'there seem to be rows but fetchall() failed -- DB API violation ?')
            _log.Log(gmLog.lData, 'rowcount: %s, description: %s' % (curs.rowcount, curs.description))
    if end_tx:
        conn.commit()
    if get_col_idx:
        idx = get_col_indices(curs)
    curs.close()
    return (True, (data, idx))

def __commit2cursor(cursor=None, queries=None, extra_verbose=False, get_col_idx=False):
    curs = cursor
    # run the queries
    for query, args in queries:
        if extra_verbose:
            t1 = time.time()
        try:
            curs.execute(query, *args)
        except:
            if extra_verbose:
                duration = time.time() - t1
                _log.Log(gmLog.lData, 'query took %3.3f seconds' % duration)
            exc_info = sys.exc_info()
            typ, val, tb = exc_info
            # concurrency conflict ?
            if str(val).find(_serialize_failure) > 0:
                _log.Log(gmLog.lData, 'concurrency conflict detected, cannot serialize access due to concurrent update')
                return (False, (2, 'l'))
            # other error
            _log.Log(gmLog.lErr, 'query: %s' % query[:2048])
            try:
                _log.Log(gmLog.lErr, 'args : %s' % str(args)[:2048])
            except MemoryError:
                pass
            _log.LogException("query failed on link [%s]" % cursor, exc_info)
            if extra_verbose:
                __log_PG_settings(curs)
            tmp = str(val).replace('ERROR:', '')
            tmp = tmp.replace('ExecAppend:', '')
            tmp = tmp.strip()
            return (False, (1, _('SQL: %s') % tmp))
        # query succeeded
        if extra_verbose:
            duration = time.time() - t1
            _log.Log(gmLog.lData, 'query: %s' % query[:2048])
            try:
                _log.Log(gmLog.lData, 'args : %s' % str(args)[:2048])
            except MemoryError:
                pass
            _log.Log(gmLog.lData, 'query succeeded on link [%s]' % cursor)
            _log.Log(gmLog.lData, '%s rows affected/returned in %3.3f seconds' % (curs.rowcount, duration))

    # done with queries
    data = None
    idx = {}
    # the last query may return rows
    try:
        data = curs.fetchall()
    except:
        if extra_verbose:
            _log.Log(gmLog.lData, 'fetchall(): last query did not return rows')
        # a non-None description suggests there were rows after all
        if curs.description is not None:
            _log.Log(gmLog.lData, 'there seem to be rows but fetchall() failed -- DB API violation ?')
            _log.Log(gmLog.lData, 'rowcount: %s, description: %s' % (curs.rowcount, curs.description))
    if get_col_idx:
        idx = get_col_indices(curs)
    return (True, (data, idx))

def run_commit(link_obj = None, queries = None, return_err_msg = None):
    """Convenience function for running a transaction
    that is supposed to get committed.

    - link_obj can be
      - a cursor: rollback/commit must be done by the caller
      - a connection: rollback/commit is handled
      - a service name: rollback/commit is handled

    - queries is a list of (query, [args]) tuples
      - executed as a single transaction

    - returns:
      - a tuple (<value>, error) if return_err_msg is True
      - a scalar <value> if return_err_msg is False

    - <value> will be
      - None: if any query failed
      - 1: if all queries succeeded (also 0 queries)
      - data: if the last query returned rows
    """
    print "DEPRECATION WARNING: gmPG.run_commit() is deprecated, use run_commit2() instead"

    # sanity checks
    if link_obj is None:
        raise TypeError, 'gmPG.run_commit(): link_obj must be of type service name, connection or cursor'
    if queries is None:
        raise TypeError, 'gmPG.run_commit(): forgot to pass in queries'
    if len(queries) == 0:
        _log.Log(gmLog.lWarn, 'no queries to execute ?!?')
        if return_err_msg:
            return (1, 'no queries to execute ?!?')
        return 1

    close_cursor = noop
    close_conn = noop
    commit = noop
    rollback = noop

    # is link_obj a cursor ?
    if hasattr(link_obj, 'fetchone') and hasattr(link_obj, 'description'):
        curs = link_obj
    # is link_obj a connection ?
    elif (hasattr(link_obj, 'commit') and hasattr(link_obj, 'cursor')):
        curs = link_obj.cursor()
        close_cursor = curs.close
        conn = link_obj
        commit = link_obj.commit
        rollback = link_obj.rollback
    # take it to be a service name, then
    else:
        pool = ConnectionPool()
        conn = pool.GetConnection(link_obj, readonly = 0)
        if conn is None:
            _log.Log(gmLog.lErr, 'cannot connect to service [%s]' % link_obj)
            if return_err_msg:
                return (None, _('cannot connect to service [%s]') % link_obj)
            return None
        curs = conn.cursor()
        close_cursor = curs.close
        close_conn = conn.close
        commit = conn.commit
        rollback = conn.rollback

    # run the queries
    for query, args in queries:
        try:
            curs.execute(query, *args)
        except:
            rollback()
            exc_info = sys.exc_info()
            _log.LogException("RW query >>>%s<<< with args >>>%s<<< failed on link [%s]" % (query[:1024], str(args)[:1024], link_obj), exc_info, verbose = _query_logging_verbosity)
            __log_PG_settings(curs)
            close_cursor()
            close_conn()
            if return_err_msg:
                typ, val, tb = exc_info
                tmp = string.replace(str(val), 'ERROR:', '')
                tmp = string.replace(tmp, 'ExecAppend:', '')
                tmp = string.strip(tmp)
                return (None, 'SQL: %s' % tmp)
            return None
        # log the query
        if _query_logging_verbosity == 1:
            _log.Log(gmLog.lData, '%s rows affected by >>>%s<<<' % (curs.rowcount, query))

    data = None
    # the last query may return rows
    try:
        data = curs.fetchall()
        if _query_logging_verbosity == 1:
            _log.Log(gmLog.lData, 'last query returned %s rows' % curs.rowcount)
    except:
        if _query_logging_verbosity == 1:
            _log.Log(gmLog.lData, 'fetchall(): last query did not return rows')
        # a non-None description suggests there were rows after all
        if curs.description is not None:
            if curs.rowcount > 0:
                _log.Log(gmLog.lData, 'there seem to be rows but fetchall() failed -- DB API violation ?')
                _log.Log(gmLog.lData, 'rowcount: %s, description: %s' % (curs.rowcount, curs.description))

    # clean up
    commit()
    close_cursor()
    close_conn()

    if data is None: status = 1
    else: status = data
    if return_err_msg: return (status, '')
    return status

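# Usage sketch for the deprecated run_commit() (illustration only; the
# service name and SQL below are placeholders):
#
#   result, err = run_commit('some_service', [("update some_table set col = %s", ['value'])], True)
#   if result is None:
#       _log.Log(gmLog.lErr, err)
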
def run_ro_query(link_obj = None, aQuery = None, get_col_idx = False, *args):
    """Runs a read-only query.

    - link_obj can be a service name, connection or cursor object

    - return status:
      - returns data if get_col_idx is False
      - returns (data, idx) if get_col_idx is True

      - if the query fails: data is None
      - if the query is not a row-returning SQL statement: data is None

    - data is a list of tuples [(w,x,y,z), (a,b,c,d), ...] where each tuple is a table row
    - idx is a map of column names to their position in the row tuples,
      e.g. { 'name': 3, 'id': 0, 'job_description': 2, 'location': 1 }

    usage: e.g. data[0][idx['name']] would return z from [(w,x,y,z), (a,b,c,d)]
    """
    # sanity checks
    if link_obj is None:
        raise TypeError, 'gmPG.run_ro_query(): link_obj must be of type service name, connection or cursor'
    if aQuery is None:
        raise TypeError, 'gmPG.run_ro_query(): forgot to pass in aQuery'

    close_cursor = noop
    close_conn = noop

    # is link_obj a cursor ?
    if hasattr(link_obj, 'fetchone') and hasattr(link_obj, 'description'):
        curs = link_obj
    # is link_obj a connection ?
    elif (hasattr(link_obj, 'commit') and hasattr(link_obj, 'cursor')):
        curs = link_obj.cursor()
        close_cursor = curs.close
    # take it to be a service name, then
    else:
        pool = ConnectionPool()
        conn = pool.GetConnection(link_obj, readonly = 1)
        if conn is None:
            _log.Log(gmLog.lErr, 'cannot get connection to service [%s]' % link_obj)
            if not get_col_idx:
                return None
            else:
                return None, None
        curs = conn.cursor()
        close_cursor = curs.close
        close_conn = pool.ReleaseConnection

    # run the query
    try:
        curs.execute(aQuery, *args)
        global last_ro_cursor_desc
        last_ro_cursor_desc = curs.description
    except:
        _log.LogException("query >>>%s<<< with args >>>%s<<< failed on link [%s]" % (aQuery[:250], str(args)[:250], link_obj), sys.exc_info(), verbose = _query_logging_verbosity)
        __log_PG_settings(curs)
        close_cursor()
        close_conn(link_obj)
        if not get_col_idx:
            return None
        else:
            return None, None

    # collect the result
    if curs.description is None:
        data = None
        _log.Log(gmLog.lErr, 'query did not return rows')
    else:
        try:
            data = curs.fetchall()
        except:
            _log.LogException('cursor.fetchall() failed on link [%s]' % link_obj, sys.exc_info(), verbose = _query_logging_verbosity)
            close_cursor()
            close_conn(link_obj)
            if not get_col_idx:
                return None
            else:
                return None, None

    # clean up
    close_conn(link_obj)
    if get_col_idx:
        col_idx = get_col_indices(curs)
        close_cursor()
        return data, col_idx
    else:
        close_cursor()
        return data

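# Usage sketch for run_ro_query() (illustration only; <a_pk> is a placeholder
# for an actual primary key value):
#
#   data, idx = run_ro_query('default', "select name, host, port from cfg.db where pk=%s", True, a_pk)
#   if data is None:
#       ...handle the error...
#   else:
#       for row in data:
#           print row[idx['name']], row[idx['host']]
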

def get_col_indices(aCursor = None):

    if aCursor is None:
        _log.Log(gmLog.lErr, 'need cursor to get column indices')
        return None
    if aCursor.description is None:
        _log.Log(gmLog.lErr, 'no result description available: cursor unused or last query did not select rows')
        return None
    col_indices = {}
    col_index = 0
    for col_desc in aCursor.description:
        col_indices[col_desc[0]] = col_index
        col_index += 1
    return col_indices



    if aCursor is None:
        _log.Log(gmLog.lErr, 'need cursor to determine primary key')
        return None
    if aTable is None:
        _log.Log(gmLog.lErr, 'need table name for which to determine primary key')

    if not run_query(aCursor, None, query_pkey_name, aTable):
        _log.Log(gmLog.lErr, 'cannot determine primary key')
        return -1
    result = aCursor.fetchone()
    if result is None:
        return None
    return result[0]

    """Returns a dictionary of referenced foreign keys.

    key   = column name of this table
    value = (referenced table name, referenced column name) tuple
    """
    manage_connection = 0
    close_cursor = 1

    # is <source> a cursor ?
    if hasattr(source, 'fetchone') and hasattr(source, 'description'):
        close_cursor = 0
        curs = source
    # is <source> a connection ?
    elif (hasattr(source, 'commit') and hasattr(source, 'cursor')):
        curs = source.cursor()
    # take it to be a service name, then
    else:
        manage_connection = 1
        pool = ConnectionPool()
        conn = pool.GetConnection(source)
        if conn is None:
            _log.Log(gmLog.lErr, 'cannot get fkey names on table [%s] from source [%s]' % (table, source))
            return None
        curs = conn.cursor()

    if not run_query(curs, None, query_fkey_names, table):
        if close_cursor:
            curs.close()
        if manage_connection:
            pool.ReleaseConnection(source)
        _log.Log(gmLog.lErr, 'cannot get foreign keys on table [%s] from source [%s]' % (table, source))
        return None

    fks = curs.fetchall()
    if close_cursor:
        curs.close()
    if manage_connection:
        pool.ReleaseConnection(source)

    references = {}
    for fk in fks:
        fkname, src_table, target_table, tmp, src_col, target_col, tmp = string.split(fk[0], '\x00')
        references[src_col] = (target_table, target_col)

    return references

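# The returned mapping looks like this (table and column names made up for
# illustration):
#
#   {'fk_other_table': ('other_table', 'pk'), 'fk_staff': ('staff', 'pk')}
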
def add_housekeeping_todo(
    reporter='$RCSfile: gmPG.py,v $ $Revision: 1.90 $',
    receiver='DEFAULT',
    problem='lazy programmer',
    solution='lazy programmer',
    context='lazy programmer',
    category='lazy programmer'
):
    queries = []
    cmd = "insert into housekeeping_todo (reported_by, reported_to, problem, solution, context, category) values (%s, %s, %s, %s, %s, %s)"
    queries.append((cmd, [reporter, receiver, problem, solution, context, category]))
    cmd = "select currval('housekeeping_todo_pk_seq')"
    queries.append((cmd, []))
    result, err = run_commit('historica', queries, 1)
    if result is None:
        _log.Log(gmLog.lErr, err)
        return (None, err)
    return (1, result[0][0])


def __run_notifications_debugger():

    def myCallback(**kwds):
        sys.stdout.flush()
        print "\n=== myCallback: got called ==="
        print kwds

    # set up a connection to play with
    dbpool = ConnectionPool()
    roconn = dbpool.GetConnection('default', extra_verbose=1)
    rocurs = roconn.cursor()

    # simple interactive command loop
    print "PostgreSQL backend listener debug shell"
    while 1:
        print "---------------------------------------"
        typed = raw_input("=> ")
        args = typed.split(' ')

        if len(args) == 0:
            continue

        if args[0] in ('help', '?'):
            print "known commands"
            print "--------------"
            print "'listen' - start listening to a signal"
            print "'ignore' - stop listening to a signal"
            print "'send' - send a signal"
            print "'quit', 'exit', 'done' - well, chicken out"
            continue

        if args[0] in ('quit', 'exit', 'done'):
            break

        if args[0] in ("listen", "ignore", "send"):
            typed = raw_input("signal name: ")
            sig_names = typed.split(' ')

            if len(sig_names) == 0:
                continue
            if args[0] == "listen":
                dbpool.Listen('default', sig_names[0], myCallback)
            if args[0] == "ignore":
                dbpool.Unlisten('default', sig_names[0], myCallback)
            if args[0] == "send":
                cmd = 'NOTIFY "%s"' % sig_names[0]
                print "... running >>>%s<<<" % (cmd)
                if not run_query(rocurs, None, cmd):
                    print "... error sending [%s]" % cmd
                roconn.commit()
            continue
        print 'unknown command [%s]' % typed

    # clean up
    print "please wait a second or two for threads to sync and die"
    dbpool.StopListener('default')
    rocurs.close()
    roconn.close()
    dbpool.ReleaseConnection('default')


if __name__ == "__main__":
    _log.Log(gmLog.lData, 'DBMS "%s" via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % ('PostgreSQL', dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))

    print "Do you want to test the backend notification code ?"
    yes_no = raw_input('y/n: ')
    if yes_no == 'y':
        __run_notifications_debugger()
        sys.exit()

    dbpool = ConnectionPool()

    print "\n\nServices available on this system:"
    print '-----------------------------------------'
    for service in dbpool.GetAvailableServices():
        print service
        dummy = dbpool.GetConnection(service)
    print "\n.......................................\n"

    # connect to the 'config' service
    db = dbpool.GetConnection('config')
    print "\n\nPossible services on any gnumed system:"
    print '-----------------------------------------'
    cursor = db.cursor()
    cursor.execute("select name from cfg.distributed_db")
    for service in cursor.fetchall():
        print service[0]

    print "\nTesting convenience functions:\n============================\n"

    print "\nResult as dictionary\n==================\n"
    cur = db.cursor()
    cursor.execute("select * from cfg.db")
    d = dictResult(cursor)
    print d
    print "\nResult attributes\n==================\n"
    n = fieldNames(cursor)

    def TestCallback(**kwds):
        print "[Backend notification received!]"

    print "\n-------------------------------------"
    print "Testing asynchronous notification for approx. 20 seconds"
    print "start psql in another window, connect to gnumed,"
    print "and type 'notify test'; if everything works,"
    print "a message [Backend notification received!] should appear\n"
    dbpool.Listen('default', 'test', TestCallback)
    time.sleep(20)
    dbpool.StopListener('default')
    print "Requesting write access connection:"
    con = dbpool.GetConnection('default', readonly=0)