Package Gnumed :: Package pycommon :: Module gmPG2

Source Code for Module Gnumed.pycommon.gmPG2

"""GNUmed PostgreSQL connection handling.

TODO: iterator/generator batch fetching:
    - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad
    - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4"

winner:
def resultset_functional_batchgenerator(cursor, size=100):
    for results in iter(lambda: cursor.fetchmany(size), []):
        for rec in results:
            yield rec
"""
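# For illustration, the batch generator from the docstring could be used
# like so (names are made up, any cursor with an executed SELECT will do):
#
#   curs = conn.cursor()
#   curs.execute(u'select * from i18n.translations')
#   for rec in resultset_functional_batchgenerator(curs, size = 100):
#       handle_record(rec)      # <handle_record> is a hypothetical callback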
# =======================================================================
__version__ = "$Revision: 1.127 $"
__author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>"
__license__ = 'GPL (details at http://www.gnu.org)'

### imports ###
# stdlib
import time, locale, sys, re as regex, os, codecs, types, datetime as pydt, logging


# GNUmed
if __name__ == '__main__':
    sys.path.insert(0, '../../')
from Gnumed.pycommon import gmLoginInfo, gmExceptions, gmDateTime, gmBorg, gmI18N, gmLog2
from Gnumed.pycommon.gmTools import prompted_input

_log = logging.getLogger('gm.db')
_log.info(__version__)


# 3rd party
try:
    import psycopg2 as dbapi
except ImportError:
    _log.exception("Python database adapter psycopg2 not found.")
    print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server."
    raise
### imports ###

_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
if not (float(dbapi.apilevel) >= 2.0):
    raise ImportError('gmPG2: supported DB-API level too low')
if not (dbapi.threadsafety > 0):
    raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
if not (dbapi.paramstyle == 'pyformat'):
    raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
try:
    dbapi.__version__.index('dt')
except ValueError:
    raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
    dbapi.__version__.index('ext')
except ValueError:
    raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
    dbapi.__version__.index('pq3')
except ValueError:
    raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')

import psycopg2.extras
import psycopg2.extensions
import psycopg2.pool
import psycopg2.errorcodes as sql_error_codes
# =======================================================================
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# things timezone
_default_client_timezone = None         # default time zone for connections
_sql_set_timezone = None
_timestamp_template = "cast('%s' as timestamp with time zone)"      # MUST NOT be unicode or else getquoted will not work
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

_default_dsn = None
_default_login = None

postgresql_version_string = None
postgresql_version = None           # accuracy: major.minor

__ro_conn_pool = None

auto_request_login_params = True
# =======================================================================
# global data
# =======================================================================

known_schema_hashes = {
    'devel': 'not released, testing only',
    'v2': 'b09d50d7ed3f91ddf4c4ddb8ea507720',
    'v3': 'e73718eaf230d8f1d2d01afa8462e176',
    'v4': '4428ccf2e54c289136819e701bb095ea',
    'v5': '7e7b093af57aea48c288e76632a382e5',       # ... old (v1) style hashes
    'v6': '90e2026ac2efd236da9c8608b8685b2d',       # new (v2) style hashes ...
    'v7': '6c9f6d3981483f8e9433df99d1947b27',
    'v8': '89b13a7af83337c3aad153b717e52360',
    'v9': '641a9b2be3c378ffc2bb2f0b1c9f051d',
    'v10': '7ef42a8fb2bd929a2cdd0c63864b4e8a',
    'v11': '03042ae24f3f92877d986fb0a6184d76',
    'v12': '06183a6616db62257e22814007a8ed07',
    'v13': 'fab7c1ae408a6530c47f9b5111a0841e',
    'v14': 'e170d543f067d1ea60bfe9076b1560cf'
}

map_schema_hash2version = {
    'b09d50d7ed3f91ddf4c4ddb8ea507720': 'v2',
    'e73718eaf230d8f1d2d01afa8462e176': 'v3',
    '4428ccf2e54c289136819e701bb095ea': 'v4',
    '7e7b093af57aea48c288e76632a382e5': 'v5',
    '90e2026ac2efd236da9c8608b8685b2d': 'v6',
    '6c9f6d3981483f8e9433df99d1947b27': 'v7',
    '89b13a7af83337c3aad153b717e52360': 'v8',
    '641a9b2be3c378ffc2bb2f0b1c9f051d': 'v9',
    '7ef42a8fb2bd929a2cdd0c63864b4e8a': 'v10',
    '03042ae24f3f92877d986fb0a6184d76': 'v11',
    '06183a6616db62257e22814007a8ed07': 'v12',
    'fab7c1ae408a6530c47f9b5111a0841e': 'v13',
    'e170d543f067d1ea60bfe9076b1560cf': 'v14'
}

map_client_branch2required_db_version = {
    u'GIT tree': u'devel',
    u'0.3': u'v9',
    u'0.4': u'v10',
    u'0.5': u'v11',
    u'0.6': u'v12',
    u'0.7': u'v13',
    u'0.8': u'v14',
    u'0.9': u'v15'
}

# get columns and data types for a given table
query_table_col_defs = u"""select
    cols.column_name,
    cols.udt_name
from
    information_schema.columns cols
where
    cols.table_schema = %s
        and
    cols.table_name = %s
order by
    cols.ordinal_position"""

query_table_attributes = u"""select
    cols.column_name
from
    information_schema.columns cols
where
    cols.table_schema = %s
        and
    cols.table_name = %s
order by
    cols.ordinal_position"""

# =======================================================================
# module globals API
# =======================================================================
def set_default_client_encoding(encoding = None):
    # check whether psycopg2 can handle this encoding
    if encoding not in psycopg2.extensions.encodings:
        raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
    # check whether Python can handle this encoding
    py_enc = psycopg2.extensions.encodings[encoding]
    try:
        codecs.lookup(py_enc)
    except LookupError:
        _log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
        raise
    # FIXME: check encoding against the database
    # FIXME: - but we may not yet have access
    # FIXME: - psycopg2 will pull its encodings from the database eventually
    # it seems safe to set it
    global _default_client_encoding
    _log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
    _default_client_encoding = encoding
    return True
#---------------------------------------------------
def set_default_client_timezone(timezone = None):

    # FIXME: use __validate
    global _default_client_timezone
    _log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
    _default_client_timezone = timezone

    global _sql_set_timezone
    _sql_set_timezone = u'set timezone to %s'

    return True
#---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):

    _log.debug(u'validating time zone [%s]', timezone)

    cmd = u'set timezone to %(tz)s'
    args = {u'tz': timezone}

    conn.commit()
    curs = conn.cursor()
    is_valid = False
    try:
        curs.execute(cmd, args)
        _log.info(u'time zone [%s] is settable', timezone)
        # can we actually use it, though ?
        cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
        try:
            curs.execute(cmd)
            curs.fetchone()
            _log.info(u'time zone [%s] is usable', timezone)
            is_valid = True
        except:
            _log.error('error using time zone [%s]', timezone)
    except dbapi.DataError:
        _log.warning(u'time zone [%s] is not settable', timezone)
    except:
        _log.error(u'failed to set time zone to [%s]', timezone)
        _log.exception(u'')

    curs.close()
    conn.rollback()

    return is_valid
#---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
    """Some timezone definitions are abbreviations, so try to
    expand them, because "set time zone" does not accept
    abbreviations."""

    cmd = u"""
select distinct on (abbrev) name
from pg_timezone_names
where
    abbrev = %(tz)s and
    name ~ '^[^/]+/[^/]+$' and
    name !~ '^Etc/'
"""
    args = {u'tz': timezone}

    conn.commit()
    curs = conn.cursor()

    result = timezone
    try:
        curs.execute(cmd, args)
        rows = curs.fetchall()
        if len(rows) > 0:
            result = rows[0][0]
            _log.debug(u'[%s] maps to [%s]', timezone, result)
    except:
        _log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

    curs.close()
    conn.rollback()

    return result
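# For illustration: expanding the abbreviation 'CEST' would typically
# yield a full name such as 'Europe/Berlin', the exact pick depending on
# the server's pg_timezone_names contents and the "distinct on" choice.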
#---------------------------------------------------
def __detect_client_timezone(conn=None):
    """This is run on the very first connection."""

    # FIXME: check whether server.timezone is the same
    # FIXME: value as what we eventually detect

    # we need gmDateTime to be initialized
    if gmDateTime.current_local_iso_numeric_timezone_string is None:
        gmDateTime.init()

    _log.debug('trying to detect timezone from system')

    tz_candidates = []
    try:
        tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
        tz_candidates.append(tz)
        expanded = __expand_timezone(conn = conn, timezone = tz)
        if expanded != tz:
            tz_candidates.append(expanded)
    except KeyError:
        pass

    tz_candidates.append(gmDateTime.current_local_timezone_name)
    expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
    if expanded != gmDateTime.current_local_timezone_name:
        tz_candidates.append(expanded)

    _log.debug('candidates: %s', str(tz_candidates))

    # find best among candidates
    global _default_client_timezone
    global _sql_set_timezone
    found = False
    for tz in tz_candidates:
        if __validate_timezone(conn = conn, timezone = tz):
            _default_client_timezone = tz
            _sql_set_timezone = u'set timezone to %s'
            found = True
            break

    if not found:
        _default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
        _sql_set_timezone = u"set time zone interval %s hour to minute"

    _log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
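# For illustration (offset value assumed): should no candidate validate,
# the fallback ends up executing SQL along the lines of
#
#   set time zone interval '+02:00' hour to minute
#
# with the offset taken from gmDateTime's ISO numeric timezone string.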
# =======================================================================
# login API
# =======================================================================
def __request_login_params_tui():
    """Text mode request of database login parameters"""
    import getpass
    login = gmLoginInfo.LoginInfo()

    print "\nPlease enter the required login parameters:"
    try:
        login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
        login.database = prompted_input(prompt = "database", default = 'gnumed_v15')
        login.user = prompted_input(prompt = "user name", default = '')
        tmp = 'password for "%s" (not shown): ' % login.user
        login.password = getpass.getpass(tmp)
        login.port = prompted_input(prompt = "port", default = 5432)
    except KeyboardInterrupt:
        _log.warning("user cancelled text mode login dialog")
        print "user cancelled text mode login dialog"
        raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

    return login
#---------------------------------------------------
def __request_login_params_gui_wx():
    """GUI (wx) input request for database login parameters.

    Returns gmLoginInfo.LoginInfo object
    """
    import wx
    # OK, wxPython was already loaded. But has the main Application instance
    # been initialized yet ? if not, the exception will kick us out
    if wx.GetApp() is None:
        raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

    # Let's launch the login dialog
    # if wx was not initialized / no main App loop, an exception should be raised anyway
    import gmAuthWidgets
    dlg = gmAuthWidgets.cLoginDialog(None, -1)
    dlg.ShowModal()
    login = dlg.panel.GetLoginInfo()
    dlg.Destroy()

    # if user cancelled or something else went wrong, raise an exception
    if login is None:
        raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

    return login
#---------------------------------------------------
def request_login_params():
    """Request login parameters for database connection."""
    # do we auto-request parameters at all ?
    if not auto_request_login_params:
        raise Exception('Cannot request login parameters.')

    # are we inside X ?
    # (if we aren't, wxGTK will crash hard at the
    #  C level with "can't open Display")
    if os.environ.has_key('DISPLAY'):
        # try wxPython GUI
        try: return __request_login_params_gui_wx()
        except: pass

    # well, either we are on the console or
    # wxPython does not work, use text mode
    return __request_login_params_tui()

# =======================================================================
# DSN API
# -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
    dsn_parts = []

    if (database is not None) and (database.strip() != ''):
        dsn_parts.append('dbname=%s' % database)

    if (host is not None) and (host.strip() != ''):
        dsn_parts.append('host=%s' % host)

    if (port is not None) and (str(port).strip() != ''):
        dsn_parts.append('port=%s' % port)

    if (user is not None) and (user.strip() != ''):
        dsn_parts.append('user=%s' % user)

    if (password is not None) and (password.strip() != ''):
        dsn_parts.append('password=%s' % password)

    dsn_parts.append('sslmode=prefer')

    return ' '.join(dsn_parts)
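# A minimal usage sketch (parameter values are made up for illustration):
#
#   make_psycopg2_dsn(database = 'gnumed_v15', host = 'localhost', port = 5432, user = 'any-doc')
#   # -> 'dbname=gnumed_v15 host=localhost port=5432 user=any-doc sslmode=prefer'
#
# Empty or None parts are simply skipped, so a local (Unix domain socket)
# connection can omit <host> altogether.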
# ------------------------------------------------------
def get_default_login():
    # make sure we do have a login
    get_default_dsn()
    return _default_login
# ------------------------------------------------------
def get_default_dsn():
    global _default_dsn
    if _default_dsn is not None:
        return _default_dsn

    login = request_login_params()
    set_default_login(login=login)

    return _default_dsn
# ------------------------------------------------------
def set_default_login(login=None):
    if login is None:
        return False

    if login.host is not None:
        if login.host.strip() == u'':
            login.host = None

    global _default_login
    _log.info('setting default login from [%s] to [%s]' % (_default_login, login))
    _default_login = login

    dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

    global _default_dsn
    _log.info('setting default DSN from [%s] to [%s]' % (_default_dsn, dsn))
    _default_dsn = dsn

    return True
# =======================================================================
# metadata API
# =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
    expected_hash = known_schema_hashes[version]
    if version == 'devel':
        args = {'ver': '9999'}
    else:
        args = {'ver': version.strip('v')}
    rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [{
            'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
            'args': args
        }]
    )
    if rows[0]['md5'] != expected_hash:
        _log.error('database schema version mismatch')
        _log.error('expected: %s (%s)' % (version, expected_hash))
        _log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
        if verbose:
            _log.debug('schema dump follows:')
            for line in get_schema_structure(link_obj=link_obj).split():
                _log.debug(line)
            _log.debug('schema revision history dump follows:')
            for line in get_schema_revision_history(link_obj=link_obj):
                _log.debug(u' - '.join(line))
        return False
    _log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
    return True
#------------------------------------------------------------------------
def get_schema_version(link_obj=None):
    rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
    try:
        return map_schema_hash2version[rows[0]['md5']]
    except KeyError:
        return u'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']
#------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
    rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
    return rows[0][0]
#------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):
    cmd = u"""
select
    imported::text,
    version,
    filename
from gm.schema_revision
order by imported
"""
    rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': cmd}])
    return rows
#------------------------------------------------------------------------
def get_current_user():
    rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
    return rows[0][0]
#------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
    """Get the foreign keys pointing to schema.table.column.

    Does not properly work with multi-column FKs.
    GNUmed doesn't use any, however.
    """
    cmd = u"""
select
    %(schema)s as referenced_schema,
    %(tbl)s as referenced_table,
    %(col)s as referenced_column,
    pgc.confkey as referenced_column_list,
    pgc.conrelid::regclass as referencing_table,
    pgc.conkey as referencing_column_list,
    (select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
    pg_constraint pgc
where
    pgc.contype = 'f'
        and
    pgc.confrelid = (
        select oid from pg_class where relname = %(tbl)s and relnamespace = (
            select oid from pg_namespace where nspname = %(schema)s
        )
    ) and
    (
        select attnum
        from pg_attribute
        where
            attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
                select oid from pg_namespace where nspname = %(schema)s
            ))
                and
            attname = %(col)s
    ) = any(pgc.confkey)
"""

    args = {
        'schema': schema,
        'tbl': table,
        'col': column
    }

    rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [
            {'cmd': cmd, 'args': args}
        ]
    )

    return rows
#------------------------------------------------------------------------
def get_child_tables(schema='public', table=None, link_obj=None):
    """Return child tables of <table>."""
    cmd = u"""
select
    pgn.nspname as namespace,
    pgc.relname as table
from
    pg_namespace pgn,
    pg_class pgc
where
    pgc.relnamespace = pgn.oid
        and
    pgc.oid in (
        select inhrelid from pg_inherits where inhparent = (
            select oid from pg_class where
                relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
                relname = %(table)s
        )
    )"""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
    return rows
#------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
    """Returns True or False."""
    cmd = u"""
select exists (
    select 1 from information_schema.tables
    where
        table_schema = %s and
        table_name = %s and
        table_type = 'BASE TABLE'
)"""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
    return rows[0][0]
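# Sketch of a call (schema/table picked for illustration only):
#
#   table_exists(schema = u'clin', table = u'encounter')
#   # -> True on a database where clin.encounter exists, False otherwise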
#------------------------------------------------------------------------
def get_col_indices(cursor = None):
    if cursor.description is None:
        _log.error('no result description available: unused cursor or last query did not select rows')
        return None
    col_indices = {}
    col_index = 0
    for col_desc in cursor.description:
        col_name = col_desc[0]
        # a query like "select 1,2;" will return two columns of the same name !
        # hence adjust to that, note, however, that dict-style access won't work
        # on results of such queries ...
        if col_indices.has_key(col_name):
            col_name = '%s_%s' % (col_name, col_index)
        col_indices[col_name] = col_index
        col_index += 1

    return col_indices
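# Illustration of the duplicate-name handling (a sketch, not module API):
#
#   curs.execute(u'select 1 as pk, 2 as pk')
#   get_col_indices(curs)
#   # -> {'pk': 0, 'pk_1': 1} - the second "pk" gets disambiguated
#   #    with its column index, as per the comment above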
#------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
    col_names = []
    col_type = {}
    for row in rows:
        col_names.append(row[0])
        # map array types
        if row[1].startswith('_'):
            col_type[row[0]] = row[1][1:] + '[]'
        else:
            col_type[row[0]] = row[1]
    col_defs = []
    col_defs.append(col_names)
    col_defs.append(col_type)
    return col_defs
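# The return value is a two-element list: [column names, type map].
# Sketch for a hypothetical table with columns pk (int4) and codes
# (an integer array, udt_name '_int4'):
#
#   get_col_defs(schema = u'clin', table = u'some_table')
#   # -> [['pk', 'codes'], {'pk': 'int4', 'codes': 'int4[]'}]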
#------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
    """Return column attributes of table"""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
    cols = []
    for row in rows:
        cols.append(row[0])
    return cols

#------------------------------------------------------------------------
# i18n functions
#------------------------------------------------------------------------
def export_translations_from_database(filename=None):
    tx_file = codecs.open(filename, 'wb', 'utf8')
    tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
    tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
    tx_file.write(u'-- - user language is set to [%s]\n\n' % get_current_user_language())
    tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
    tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
    tx_file.write(u'set default_transaction_read_only to off;\n\n')
    tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

    cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
    rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
    for row in rows:
        line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
            row['lang'].replace("'", "\\'"),
            row['orig'].replace("'", "\\'"),
            row['trans'].replace("'", "\\'")
        )
        tx_file.write(line)
    tx_file.write(u'\n')

    tx_file.write(u'\\set ON_ERROR_STOP 1\n')
    tx_file.close()

    return True
#------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
    cmd = u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
    args = {'lang': language, 'orig': original}
    run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
    return True

#------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
    cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
    args = {'lang': language, 'orig': original, 'trans': translation}
    run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False)
    return args

#------------------------------------------------------------------------
def get_translation_languages():
    rows, idx = run_ro_queries (
        queries = [{'cmd': u'select distinct lang from i18n.translations'}]
    )
    return [ r[0] for r in rows ]

#------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):

    args = {'lang': language}
    _log.debug('language [%s]', language)

    if order_by is None:
        order_by = u'ORDER BY lang, orig'
    else:
        order_by = u'ORDER BY %s' % order_by

    if language is None:
        cmd = u"""
SELECT DISTINCT ON (orig, lang)
    lang, orig, trans
FROM ((

    -- strings stored as translation keys whether translated or not
    SELECT
        NULL as lang,
        ik.orig,
        NULL AS trans
    FROM
        i18n.keys ik

) UNION ALL (

    -- already translated strings
    SELECT
        it.lang,
        it.orig,
        it.trans
    FROM
        i18n.translations it

)) as translatable_strings
%s""" % order_by
    else:
        cmd = u"""
SELECT DISTINCT ON (orig, lang)
    lang, orig, trans
FROM ((

    -- strings stored as translation keys whether translated or not
    SELECT
        %%(lang)s as lang,
        ik.orig,
        i18n._(ik.orig, %%(lang)s) AS trans
    FROM
        i18n.keys ik

) UNION ALL (

    -- already translated strings
    SELECT
        %%(lang)s as lang,
        it.orig,
        i18n._(it.orig, %%(lang)s) AS trans
    FROM
        i18n.translations it

)) AS translatable_strings
%s""" % order_by

    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

    if rows is None:
        _log.error('no translatable strings found')
    else:
        _log.debug('%s translatable strings found', len(rows))

    return rows

#------------------------------------------------------------------------
def get_current_user_language():
    cmd = u'select i18n.get_curr_lang()'
    rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
    return rows[0][0]

#------------------------------------------------------------------------
def set_user_language(user=None, language=None):
    """Set the user language in the database.

    user = None: current db user
    language = None: unset
    """
    _log.info('setting database language for user [%s] to [%s]', user, language)

    args = {
        'usr': user,
        'lang': language
    }

    if language is None:
        if user is None:
            queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
        else:
            queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
        queries.append({'cmd': u'select True'})
    else:
        if user is None:
            queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
        else:
            queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

    rows, idx = run_rw_queries(queries = queries, return_data = True)

    if not rows[0][0]:
        _log.error('cannot set database language to [%s] for user [%s]', language, user)

    return rows[0][0]
#------------------------------------------------------------------------
def force_user_language(language=None):
    """Set the user language in the database.

    - regardless of whether there is any translation available.
    - only for the current user
    """
    _log.info('forcing database language for current db user to [%s]', language)

    run_rw_queries(queries = [{
        'cmd': u'select i18n.force_curr_lang(%(lang)s)',
        'args': {'lang': language}
    }])
#------------------------------------------------------------------------
#------------------------------------------------------------------------
text_expansion_keywords = None

def get_text_expansion_keywords():
    global text_expansion_keywords
    if text_expansion_keywords is not None:
        return text_expansion_keywords

    cmd = u"""select keyword, public_expansion, private_expansion, owner from clin.v_keyword_expansions"""
    rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
    text_expansion_keywords = rows

    _log.info('retrieved %s text expansion keywords', len(text_expansion_keywords))

    return text_expansion_keywords
#------------------------------------------------------------------------
def expand_keyword(keyword = None):

    # Easter Egg ;-)
    if keyword == u'$$steffi':
        return u'Hai, play ! Versucht das ! (Keks dazu ?) :-)'

    cmd = u"""select expansion from clin.v_your_keyword_expansions where keyword = %(kwd)s"""
    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])

    if len(rows) == 0:
        return None

    return rows[0]['expansion']
#------------------------------------------------------------------------
def get_keyword_expansion_candidates(keyword = None):

    if keyword is None:
        return []

    get_text_expansion_keywords()

    candidates = []
    for kwd in text_expansion_keywords:
        if kwd['keyword'].startswith(keyword):
            candidates.append(kwd['keyword'])

    return candidates
#------------------------------------------------------------------------
def add_text_expansion(keyword=None, expansion=None, public=None):

    if public:
        cmd = u"select 1 from clin.v_keyword_expansions where public_expansion is true and keyword = %(kwd)s"
    else:
        cmd = u"select 1 from clin.v_your_keyword_expansions where private_expansion is true and keyword = %(kwd)s"

    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
    if len(rows) != 0:
        return False

    if public:
        cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, null)"""
    else:
        cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))"""

    rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword, 'exp': expansion}}])

    global text_expansion_keywords
    text_expansion_keywords = None

    return True
#------------------------------------------------------------------------
def delete_text_expansion(keyword):
    cmd = u"""
delete from clin.keyword_expansion where
    keyword = %(kwd)s and (
        (fk_staff = (select pk from dem.staff where db_user = current_user))
            or
        (fk_staff is null and owner = current_user)
    )"""
    rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])

    global text_expansion_keywords
    text_expansion_keywords = None
#------------------------------------------------------------------------
def edit_text_expansion(keyword, expansion):

    cmd1 = u"""
delete from clin.keyword_expansion where
    keyword = %(kwd)s and
    fk_staff = (select pk from dem.staff where db_user = current_user)"""

    cmd2 = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))"""

    rows, idx = run_rw_queries(queries = [
        {'cmd': cmd1, 'args': {'kwd': keyword}},
        {'cmd': cmd2, 'args': {'kwd': keyword, 'exp': expansion}},
    ])

    global text_expansion_keywords
    text_expansion_keywords = None
# =======================================================================
# query runners and helpers
# =======================================================================
def send_maintenance_notification():
    cmd = u'notify "db_maintenance_warning:"'
    run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
#------------------------------------------------------------------------
def send_maintenance_shutdown():
    cmd = u'notify "db_maintenance_disconnect:"'
    run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
#------------------------------------------------------------------------
def is_pg_interval(candidate=None):
    cmd = u'select %(candidate)s::interval'
    try:
        rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
        return True
    except:
        cmd = u'select %(candidate)s::text::interval'
        try:
            rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
            return True
        except:
            return False
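# Expected outcomes (sketch; values illustrative):
#
#   is_pg_interval(u'2 weeks')          # -> True, a valid PostgreSQL interval
#   is_pg_interval(u'not a timespan')   # -> False, fails both casts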
#------------------------------------------------------------------------
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
    outfile = file(filename, 'wb')
    result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
    outfile.close()
    return result
#------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
    """Store data from a bytea field into a file.

    <data_query>
    - dict {'cmd': ..., 'args': ...}
    - 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
    - 'args' must be a dict
    - must return one row with one field of type bytea
    <file_obj>
    - must be a file-like Python object
    <data_size>
    - integer of the total size of the expected data or None
    <data_size_query>
    - dict {'cmd': ..., 'args': ...}
    - cmd must be unicode
    - must return one row with one field with the octet_length() of the data field
    - used only when <data_size> is None
    """
    if data_size == 0:
        return True

    # If the client sets an encoding other than the default we
    # will receive encoding-parsed data which isn't the binary
    # content we want. Hence we need to get our own connection.
    # It must be a read-write one so that we don't affect the
    # encoding for other users of the shared read-only
    # connections.
    # Actually, encodings shouldn't be applied to binary data
    # (eg. bytea types) in the first place but that is only
    # reported to be fixed > v7.4.
    # further tests reveal that at least on PG 8.0 this bug still
    # manifests itself
    conn = get_raw_connection(readonly=True)

    if data_size is None:
        rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
        data_size = rows[0][0]
    if data_size in [None, 0]:
        conn.rollback()
        return True

    _log.debug('expecting bytea data of size: [%s] bytes' % data_size)
    _log.debug('using chunk size of: [%s] bytes' % chunk_size)

    # chunk size of 0 means "retrieve whole field at once"
    if chunk_size == 0:
        chunk_size = data_size
        _log.debug('chunk size [0] bytes: retrieving all data at once')

    # Windoze sucks: it can't transfer objects of arbitrary size,
    # anyways, we need to split the transfer,
    # however, only possible if postgres >= 7.2
    needed_chunks, remainder = divmod(data_size, chunk_size)
    _log.debug('chunks to retrieve: [%s]' % needed_chunks)
    _log.debug('remainder to retrieve: [%s] bytes' % remainder)

    # retrieve chunks, skipped if data size < chunk size,
    # does this not carry the danger of cutting up multi-byte escape sequences ?
    # no, since bytea is binary,
    # yes, since in bytea there are *some* escaped values, still
    # no, since those are only escaped during *transfer*, not on-disk, hence
    # only complete escape sequences are put on the wire
    for chunk_id in range(needed_chunks):
        chunk_start = (chunk_id * chunk_size) + 1
        data_query['args']['start'] = chunk_start
        data_query['args']['size'] = chunk_size
        try:
            rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
        except:
            _log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
            conn.rollback()
            raise
        # it would be a fatal error to see more than one result as ids are supposed to be unique
        file_obj.write(str(rows[0][0]))

    # retrieve remainder
    if remainder > 0:
        chunk_start = (needed_chunks * chunk_size) + 1
        data_query['args']['start'] = chunk_start
        data_query['args']['size'] = remainder
        try:
            rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
        except:
            _log.error('cannot retrieve remaining [%s] bytes' % remainder)
            conn.rollback()
            raise
        # it would be a fatal error to see more than one result as ids are supposed to be unique
        file_obj.write(str(rows[0][0]))

    conn.rollback()
    return True
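# A hedged sketch of a suitable <data_query>/<data_size_query> pair
# (table, column and pk value are made up for illustration):
#
#   pk = 1234
#   bytea2file (
#       data_query = {
#           'cmd': u'select substring(data from %(start)s for %(size)s) from blobs.doc_obj where pk = %(pk)s',
#           'args': {'pk': pk}
#       },
#       filename = '/tmp/dump.bin',
#       data_size_query = {
#           'cmd': u'select octet_length(data) from blobs.doc_obj where pk = %(pk)s',
#           'args': {'pk': pk}
#       }
#   )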
#------------------------------------------------------------------------
def file2bytea(query=None, filename=None, args=None, conn=None):
    """Store data from a file into a bytea field.

    The query must:
    - be in unicode
    - contain a format spec identifying the row (eg a primary key)
      matching <args> if it is an UPDATE
    - contain a format spec %(data)s::bytea
    """
    # read data from file
    infile = file(filename, "rb")
    data_as_byte_string = infile.read()
    infile.close()
    if args is None:
        args = {}
    args['data'] = buffer(data_as_byte_string)
    del(data_as_byte_string)

    # insert the data
    if conn is None:
        conn = get_raw_connection(readonly=False)
        close_conn = True
    else:
        close_conn = False

    run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)

    if close_conn:
        conn.close()

    return
#------------------------------------------------------------------------
def sanitize_pg_regex(expression=None, escape_all=False):
    """Escape input for use in a PostgreSQL regular expression.

    If a fragment comes from user input and is to be used
    as a regular expression we need to make sure it doesn't
    contain invalid regex patterns such as unbalanced ('s.

    <escape_all>
        True: try to escape *all* metacharacters
        False: only escape those which render the regex invalid
    """
    return expression.replace (
            '(', '\('
        ).replace (
            ')', '\)'
        ).replace (
            '[', '\['
        ).replace (
            '+', '\+'
        ).replace (
            '.', '\.'
        ).replace (
            '*', '\*'
        )
        #']', '\]',     # not needed
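# Sketch of the effect:
#
#   sanitize_pg_regex(u'pain (unspec.)')
#   # -> u'pain \(unspec\.\)' - parentheses and dots no longer
#   #    carry regex meaning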
#------------------------------------------------------------------------
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
    """Run read-only queries.

    <queries> must be a list of dicts:
        [
            {'cmd': <string>, 'args': <dict> or <tuple>},
            {...},
            ...
        ]
    """
    if isinstance(link_obj, dbapi._psycopg.cursor):
        curs = link_obj
        curs_close = __noop
        tx_rollback = __noop
    elif isinstance(link_obj, dbapi._psycopg.connection):
        curs = link_obj.cursor()
        curs_close = curs.close
        tx_rollback = link_obj.rollback
    elif link_obj is None:
        conn = get_connection(readonly=True, verbose=verbose)
        curs = conn.cursor()
        curs_close = curs.close
        tx_rollback = conn.rollback
    else:
        raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

    if verbose:
        _log.debug('cursor: %s', curs)

    for query in queries:
        if type(query['cmd']) is not types.UnicodeType:
            print "run_ro_queries(): non-unicode query"
            print query['cmd']
        try:
            args = query['args']
        except KeyError:
            args = None
        try:
            curs.execute(query['cmd'], args)
            if verbose:
                _log.debug('ran query: [%s]', curs.query)
                _log.debug('PG status message: %s', curs.statusmessage)
                _log.debug('cursor description: %s', str(curs.description))
        except:
            # FIXME: use .pgcode
            try:
                curs_close()
            except dbapi.InterfaceError:
                _log.exception('cannot close cursor')
            tx_rollback()       # need to rollback so ABORT state isn't preserved in pooled conns
            _log.error('query failed: [%s]', curs.query)
            _log.error('PG status message: %s', curs.statusmessage)
            raise

    data = None
    col_idx = None
    if return_data:
        data = curs.fetchall()
        if verbose:
            _log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
            _log.debug('cursor description: %s', str(curs.description))
        if get_col_idx:
            col_idx = get_col_indices(curs)

    curs_close()
    tx_rollback()       # rollback just so that we don't stay IDLE IN TRANSACTION forever
    return (data, col_idx)
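# A minimal usage sketch (the language value is illustrative):
#
#   rows, idx = run_ro_queries (
#       queries = [{
#           'cmd': u'select orig, trans from i18n.translations where lang = %(lang)s',
#           'args': {'lang': u'de'}
#       }],
#       get_col_idx = True
#   )
#   for row in rows:
#       print row[idx['orig']], row[idx['trans']]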
#------------------------------------------------------------------------
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
    """Convenience function for running a transaction
    that is supposed to get committed.

    <link_obj>
        can be either:
        - a cursor
        - a connection

    <queries>
        is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>}]
        to be executed as a single transaction, the last
        query may usefully return rows (such as a
        "select currval('some_sequence')" statement)

    <end_tx>
        - controls whether the transaction is finalized (eg.
          committed/rolled back) or not, this allows the
          call to run_rw_queries() to be part of a framing
          transaction
        - if link_obj is a connection then <end_tx> will
          default to False unless it is explicitly set to
          True which is taken to mean "yes, you do have full
          control over the transaction" in which case the
          transaction is properly finalized
        - if link_obj is a cursor we CANNOT finalize the
          transaction because we would need the connection for that
        - if link_obj is None <end_tx> will, of course, always be True

    <return_data>
        - if true, the returned data will include the rows
          the last query selected
        - if false, it returns None instead

    <get_col_idx>
        - if true, the returned data will include a dictionary
          mapping field names to column positions
        - if false, the returned data returns None instead

    method result:
        - returns a tuple (data, idx)
        - <data>:
            * (None, None) if last query did not return rows
            * ("fetchall() result", <index>) if last query returned any rows
            * for <index> see <get_col_idx>
    """
    if isinstance(link_obj, dbapi._psycopg.cursor):
        conn_close = __noop
        conn_commit = __noop
        conn_rollback = __noop
        curs = link_obj
        curs_close = __noop
    elif isinstance(link_obj, dbapi._psycopg.connection):
        conn_close = __noop
        if end_tx:
            conn_commit = link_obj.commit
            conn_rollback = link_obj.rollback
        else:
            conn_commit = __noop
            conn_rollback = __noop
        curs = link_obj.cursor()
        curs_close = curs.close
    elif link_obj is None:
        conn = get_connection(readonly=False)
        conn_close = conn.close
        conn_commit = conn.commit
        conn_rollback = conn.rollback
        curs = conn.cursor()
        curs_close = curs.close
    else:
        raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

    for query in queries:
        if type(query['cmd']) is not types.UnicodeType:
            print "run_rw_queries(): non-unicode query"
            print query['cmd']
        try:
            args = query['args']
        except KeyError:
            args = None
        try:
            curs.execute(query['cmd'], args)
        except:
            _log.exception('error running RW query')
            gmLog2.log_stack_trace()
            try:
                curs_close()
                conn_rollback()
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
                raise
            raise

    data = None
    col_idx = None
    if return_data:
        try:
            data = curs.fetchall()
        except:
            _log.exception('error fetching data from RW query')
            gmLog2.log_stack_trace()
            try:
                curs_close()
                conn_rollback()
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
                raise
            raise
        if get_col_idx:
            col_idx = get_col_indices(curs)

    curs_close()
    conn_commit()
    conn_close()

    return (data, col_idx)
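# A minimal usage sketch (statement and values are illustrative):
#
#   run_rw_queries(queries = [{
#       'cmd': u'select i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)',
#       'args': {'lang': u'de', 'orig': u'Hello', 'trans': u'Hallo'}
#   }])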
#------------------------------------------------------------------------
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
    """Generates SQL for an INSERT query.

    values: dict of values keyed by field to insert them into
    """
    if schema is None:
        schema = u'public'

    fields = values.keys()      # that way val_snippets and fields really should end up in the same order
    val_snippets = []
    for field in fields:
        val_snippets.append(u'%%(%s)s' % field)

    if returning is None:
        returning = u''
        return_data = False
    else:
        returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
        return_data = True

    # note: schema, table and field names are interpolated directly into
    # the statement and must therefore come from trusted code, not from
    # user input (quote_ident() cannot be called in identifier position)
    cmd = u"""\nINSERT INTO %s.%s (\n\t\t%s\n\t) VALUES (\n\t\t%s\n\t)%s""" % (
        schema,
        table,
        u',\n\t\t'.join(fields),
        u',\n\t\t'.join(val_snippets),
        returning
    )

    _log.debug(u'running SQL: >>>%s<<<', cmd)

    return run_rw_queries (
        link_obj = link_obj,
        queries = [{'cmd': cmd, 'args': values}],
        end_tx = end_tx,
        return_data = return_data,
        get_col_idx = get_col_idx,
        verbose = verbose
    )
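# A hedged usage sketch (schema/table/values are illustrative):
#
#   rows, idx = run_insert (
#       schema = u'clin',
#       table = u'keyword_expansion',
#       values = {u'keyword': u'tb', u'expansion': u'tuberculosis'},
#       returning = [u'pk']
#   )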
# =======================================================================
# connection handling API
# -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
    """
    GNUmed database connection pool.

    Extends psycopg2's PersistentConnectionPool with
    a custom _connect() function. Supports one connection
    per thread - which also ties it to one particular DSN.
    """
    #--------------------------------------------------
    def _connect(self, key=None):

        conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)

        conn.original_close = conn.close
        conn.close = _raise_exception_on_ro_conn_close

        if key is not None:
            self._used[key] = conn
            self._rused[id(conn)] = key
        else:
            self._pool.append(conn)

        return conn
    #--------------------------------------------------
    def shutdown(self):
        for conn_key in self._used.keys():
            _log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
            self._used[conn_key].original_close()
# -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
    """Get a raw, unadorned connection.

    - this will not set any parameters such as encoding, timezone, datestyle
    - the only requirement is a valid DSN
    - hence it can be used for "service" connections
      for verifying encodings etc
    """
    # FIXME: support verbose
    if dsn is None:
        dsn = get_default_dsn()

    try:
        conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
    except dbapi.OperationalError, e:

        t, v, tb = sys.exc_info()
        try:
            msg = e.args[0]
        except (AttributeError, IndexError, TypeError):
            raise

        msg = unicode(msg, gmI18N.get_encoding(), 'replace')

        if msg.find('fe_sendauth') != -1:
            raise cAuthenticationError, (dsn, msg), tb

        if regex.search('user ".*" does not exist', msg) is not None:
            raise cAuthenticationError, (dsn, msg), tb

        if msg.find('uthenti') != -1:
            raise cAuthenticationError, (dsn, msg), tb

        raise

    _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

    # do first-time stuff
    global postgresql_version
    if postgresql_version is None:
        curs = conn.cursor()
        curs.execute ("""
            select
                (split_part(setting, '.', 1) || '.' || split_part(setting, '.', 2))::numeric as version
            from pg_settings
            where name = 'server_version'"""
        )
        postgresql_version = curs.fetchone()['version']
        _log.info('PostgreSQL version (numeric): %s' % postgresql_version)
        try:
            curs.execute("select pg_size_pretty(pg_database_size(current_database()))")
            _log.info('database size: %s', curs.fetchone()[0])
        except:
            pass
        if verbose:
            __log_PG_settings(curs=curs)
        curs.close()
        conn.commit()

    if _default_client_timezone is None:
        __detect_client_timezone(conn = conn)

    curs = conn.cursor()

    # set access mode
    if readonly:
        _log.debug('access mode [READ ONLY]')
        cmd = 'set session characteristics as transaction READ ONLY'
        curs.execute(cmd)
        cmd = 'set default_transaction_read_only to on'
        curs.execute(cmd)
    else:
        _log.debug('access mode [READ WRITE]')
        cmd = 'set session characteristics as transaction READ WRITE'
        curs.execute(cmd)
        cmd = 'set default_transaction_read_only to off'
        curs.execute(cmd)

    curs.close()
    conn.commit()

    conn.is_decorated = False

    return conn
# =======================================================================
def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
    """Get a new connection.

    This assumes the locale system has been initialized
    unless an encoding is specified.
    """
    # FIXME: support pooled on RW, too
    # FIXME: for now, support the default DSN only
    if pooled and readonly and (dsn is None):
        global __ro_conn_pool
        if __ro_conn_pool is None:
            __ro_conn_pool = cConnectionPool (
                minconn = 1,
                maxconn = 2,
                dsn = dsn,
                verbose = verbose
            )
        conn = __ro_conn_pool.getconn()
    else:
        conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=readonly)

    if conn.is_decorated:
        return conn

    if encoding is None:
        encoding = _default_client_encoding
    if encoding is None:
        encoding = gmI18N.get_encoding()
        _log.warning('client encoding not specified')
        _log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
        _log.warning('for this to work properly the application MUST have called locale.setlocale() before')

    # set connection properties
    # 1) client encoding
    try:
        conn.set_client_encoding(encoding)
    except dbapi.OperationalError:
        t, v, tb = sys.exc_info()
        if str(v).find("can't set encoding to") != -1:
            raise cEncodingError, (encoding, v), tb
        raise

    # 2) transaction isolation level
    if readonly:
        conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
        iso_level = u'read committed'
    else:
        conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
        iso_level = u'serializable'

    _log.debug('client string encoding [%s], isolation level [%s], time zone [%s], datestyle [ISO], sql_inheritance [ON]', encoding, iso_level, _default_client_timezone)

    curs = conn.cursor()

    # client time zone
    curs.execute(_sql_set_timezone, [_default_client_timezone])

    # datestyle
    # regarding DMY/YMD handling: since we force *input* to
    # ISO, too, the DMY/YMD setting is not needed
    cmd = "set datestyle to 'ISO'"
    curs.execute(cmd)

    # SQL inheritance mode
    cmd = 'set sql_inheritance to on'
    curs.execute(cmd)

    # version string
    global postgresql_version_string
    if postgresql_version_string is None:
        curs.execute('select version()')
        postgresql_version_string = curs.fetchone()['version']
        _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

    curs.close()
    conn.commit()

    conn.is_decorated = True

    return conn
#-----------------------------------------------------------------------
def shutdown():
    if __ro_conn_pool is None:
        return
    __ro_conn_pool.shutdown()
# ======================================================================
# internal helpers
#-----------------------------------------------------------------------
def __noop():
    pass
#-----------------------------------------------------------------------
def _raise_exception_on_ro_conn_close():
    raise TypeError(u'close() called on read-only connection')
#-----------------------------------------------------------------------
def sanity_check_time_skew(tolerance=60):
    """Check server time and local time to be within
    the given tolerance of each other.

    tolerance: seconds
    """
    _log.debug('maximum skew tolerance (seconds): %s', tolerance)

    cmd = u"select now() at time zone 'UTC'"
    conn = get_raw_connection(readonly=True)
    curs = conn.cursor()

    start = time.time()
    rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
    end = time.time()
    client_now_as_utc = pydt.datetime.utcnow()

    curs.close()
    conn.commit()

    server_now_as_utc = rows[0][0]
    query_duration = end - start
    _log.info('server "now" (UTC): %s', server_now_as_utc)
    _log.info('client "now" (UTC): %s', client_now_as_utc)
    _log.debug('wire roundtrip (seconds): %s', query_duration)

    if query_duration > tolerance:
        _log.error('useless to check client/server time skew, wire roundtrip > tolerance')
        return False

    if server_now_as_utc > client_now_as_utc:
        real_skew = server_now_as_utc - client_now_as_utc
    else:
        real_skew = client_now_as_utc - server_now_as_utc

    _log.debug('client/server time skew: %s', real_skew)

    if real_skew > pydt.timedelta(seconds = tolerance):
        _log.error('client/server time skew > tolerance')
        return False

    return True
#-----------------------------------------------------------------------
def sanity_check_database_settings():
    """Checks database settings.

    returns (status, message)
    status:
        0: no problem
        1: non-fatal problem
        2: fatal problem
    """
    _log.debug('checking database settings')
    settings = {
        # setting: [expected value, risk, fatal?]
        u'allow_system_table_mods': [u'off', u'system breakage', False],
        u'check_function_bodies': [u'on', u'suboptimal error detection', False],
        u'default_transaction_read_only': [u'on', u'accidental database writes', False],
        u'fsync': [u'on', u'data loss/corruption', True],
        u'full_page_writes': [u'on', u'data loss/corruption', False],
        u'lc_messages': [u'C', u'suboptimal error detection', False],
        u'password_encryption': [u'on', u'breach of confidentiality', False],
        #u'regex_flavor': [u'advanced', u'query breakage', False],     # 9.0 doesn't support this anymore, default now advanced anyway
        u'synchronous_commit': [u'on', u'data loss/corruption', False],
        u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
    }

    from Gnumed.pycommon import gmCfg2
    _cfg = gmCfg2.gmCfgData()
    if _cfg.get(option = u'hipaa'):
        settings[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
        settings[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
    else:
        settings[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
        settings[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

    cmd = u"select name, setting from pg_settings where name in %(settings)s"
    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'settings': tuple(settings.keys())}}])

    found_error = False
    found_problem = False
    msg = []
    for row in rows:
        if row[1] != settings[row[0]][0]:
            if settings[row[0]][2] is True:
                found_error = True
            elif settings[row[0]][2] is False:
                found_problem = True
            elif settings[row[0]][2] is None:
                pass
            else:
                _log.error(settings[row[0]])
                raise ValueError(u'invalid database configuration sanity check')
            msg.append(_(' option [%s]: %s') % (row[0], row[1]))
            msg.append(_(' risk: %s') % settings[row[0]][1])
            _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (row[0], row[1], settings[row[0]][0], settings[row[0]][1]))

    if found_error:
        return 2, u'\n'.join(msg)

    if found_problem:
        return 1, u'\n'.join(msg)

    return 0, u''
1617 #------------------------------------------------------------------------
1618  def __log_PG_settings(curs=None): 
1619          # don't use any of the run_*()s since that might 
1620          # create a loop if we fail here 
1621          # FIXME: use pg_settings 
1622          try: 
1623                  curs.execute(u'show all') 
1624          except: 
1625                  _log.exception(u'cannot log PG settings (>>>show all<<< failed)') 
1626                  return False 
1627          settings = curs.fetchall() 
1628          if settings is None: 
1629                  _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)') 
1630                  return False 
1631          for setting in settings: 
1632                  _log.debug(u'PG option [%s]: %s', setting[0], setting[1]) 
1633          return True 
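# A sketch of the pg_settings-based variant the FIXME above asks for,
# under the same no-run_*() constraint (not what the code does today):
#
#       curs.execute(u'select name, setting from pg_settings')
#       for name, value in curs.fetchall():
#               _log.debug(u'PG option [%s]: %s', name, value)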
1634 # =======================================================================
1635  def extract_msg_from_pg_exception(exc=None): 
1636          # note: decode only if the message is not unicode already 
1637          try: 
1638                  msg = exc.args[0] 
1639          except (AttributeError, IndexError, TypeError): 
1640                  return u'cannot extract message from exception' 
1641  
1642          return msg if isinstance(msg, unicode) else unicode(msg, gmI18N.get_encoding(), 'replace') 
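# A minimal usage sketch (the failing query is illustrative only):
#
#       try:
#               run_ro_queries(queries = [{'cmd': u'select 1/0'}])
#       except dbapi.Error, e:
#               print extract_msg_from_pg_exception(e)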
1643 # =======================================================================
1644  class cAuthenticationError(dbapi.OperationalError): 
1645  
1646          def __init__(self, dsn=None, prev_val=None): 
1647                  self.dsn = dsn 
1648                  self.prev_val = prev_val 
1649  
1650          def __str__(self): 
1651                  _log.warning('%s.__str__() called', self.__class__.__name__) 
1652                  tmp = u'PostgreSQL: %s\nDSN: %s' % (self.prev_val, self.dsn) 
1653                  _log.error(tmp) 
1654                  return tmp.encode(gmI18N.get_encoding(), 'replace') 
1655  
1656          def __unicode__(self): 
1657                  return u'PostgreSQL: %s\nDSN: %s' % (self.prev_val, self.dsn) 
1658  
1659  # ======================================================================= 
1660  # custom psycopg2 extensions 
1661  # ======================================================================= 
1662  class cEncodingError(dbapi.OperationalError): 
1663  
1664          def __init__(self, encoding=None, prev_val=None): 
1665                  self.encoding = encoding 
1666                  self.prev_val = prev_val 
1667  
1668          def __str__(self): 
1669                  _log.warning('%s.__str__() called', self.__class__.__name__) 
1670                  return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace')) 
1671  
1672          def __unicode__(self): 
1673                  return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding) 
1674  
1675  # ----------------------------------------------------------------------- 
1676  # Python -> PostgreSQL 
1677  # ----------------------------------------------------------------------- 
1678  # re-test when Squeeze (and thus psycopg2 2.2) becomes Stable 
1679  class cAdapterPyDateTime(object): 
1680  
1681          def __init__(self, dt): 
1682                  if dt.tzinfo is None: 
1683                          raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat()) 
1684                  self.__dt = dt 
1685  
1686          def getquoted(self): 
1687                  return _timestamp_template % self.__dt.isoformat() 
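# A minimal sketch of what the adapter produces (values are illustrative):
#
#       dt = pydt.datetime(2011, 1, 31, 12, 0, 0, tzinfo = FixedOffsetTimezone(offset = 60))
#       print cAdapterPyDateTime(dt).getquoted()
#       # -> cast('2011-01-31T12:00:00+01:00' as timestamp with time zone)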
1688  
1689  # remove for 0.9 
1690  # ---------------------------------------------------------------------- 
1691  #class cAdapterMxDateTime(object): 
1692  # 
1693  #       def __init__(self, dt): 
1694  #               if dt.tz == '???': 
1695  #                       _log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt) 
1696  #               self.__dt = dt 
1697  # 
1698  #       def getquoted(self): 
1699  #               # under some locale settings the mx.DateTime ISO formatter 
1700  #               # will insert "," into the ISO string; 
1701  #               # while that is allowed per the ISO 8601 spec, 
1702  #               # PostgreSQL cannot currently handle it, 
1703  #               # so map those "," to "." to make things work: 
1704  #               return mxDT.ISO.str(self.__dt).replace(',', '.') 
1705  # 
1706  # ---------------------------------------------------------------------- 
1707  # PostgreSQL -> Python 
1708  # ---------------------------------------------------------------------- 
1709  
1710  #======================================================================= 
1711  # main 
1712  #----------------------------------------------------------------------- 
1713  
1714  # make sure psycopg2 knows how to handle unicode ... 
1715  # intended to become standard 
1716  # re-test when Squeeze (and thus psycopg2 2.2) becomes Stable 
1717  psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) 
1718  psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY) 
1719  
1720  # tell psycopg2 how to adapt datetime types with timestamps when locales are in use 
1721  # check in 0.9: 
1722  psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime) 
1723  
1724  # remove for 0.9 
1725  try: 
1726          import mx.DateTime as mxDT 
1727          #psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime) 
1728  except ImportError: 
1729          _log.warning('cannot import mx.DateTime') 
1730  
1731  # do NOT adapt *lists* to "... IN (...) ..." syntax because we want 
1732  # them adapted to "... ARRAY[...] ..." so we can support PG arrays 
1733  
1734  #======================================================================= 
1735  if __name__ == "__main__": 
1736  
1737          if len(sys.argv) < 2: 
1738                  sys.exit() 
1739  
1740          if sys.argv[1] != 'test': 
1741                  sys.exit() 
1742  
1743          logging.basicConfig(level = logging.DEBUG) 
1744          #-------------------------------------------------------------------- 
1745          def test_file2bytea(): 
1746                  run_rw_queries(queries = [ 
1747                          {'cmd': u'create table test_bytea (data bytea)'} 
1748                  ]) 
1749  
1750                  cmd = u'insert into test_bytea values (%(data)s::bytea)' 
1751                  try: 
1752                          file2bytea(query = cmd, filename = sys.argv[2]) 
1753                  except: 
1754                          _log.exception('error') 
1755  
1756                  run_rw_queries(queries = [ 
1757                          {'cmd': u'drop table test_bytea'} 
1758                  ]) 
1759 #--------------------------------------------------------------------
1760          def test_get_connection(): 
1761                  print "testing get_connection()" 
1762  
1763                  dsn = 'foo' 
1764                  try: 
1765                          conn = get_connection(dsn=dsn) 
1766                  except dbapi.OperationalError, e: 
1767                          print "SUCCESS: get_connection(%s) failed as expected" % dsn 
1768                          t, v = sys.exc_info()[:2] 
1769                          print ' ', t 
1770                          print ' ', v 
1771  
1772                  dsn = 'dbname=gnumed_v9' 
1773                  try: 
1774                          conn = get_connection(dsn=dsn) 
1775                  except cAuthenticationError: 
1776                          print "SUCCESS: get_connection(%s) failed as expected" % dsn 
1777                          t, v = sys.exc_info()[:2] 
1778                          print ' ', t 
1779                          print ' ', v 
1780  
1781                  dsn = 'dbname=gnumed_v9 user=abc' 
1782                  try: 
1783                          conn = get_connection(dsn=dsn) 
1784                  except cAuthenticationError: 
1785                          print "SUCCESS: get_connection(%s) failed as expected" % dsn 
1786                          t, v = sys.exc_info()[:2] 
1787                          print ' ', t 
1788                          print ' ', v 
1789  
1790                  dsn = 'dbname=gnumed_v9 user=any-doc' 
1791                  try: 
1792                          conn = get_connection(dsn=dsn) 
1793                  except cAuthenticationError: 
1794                          print "SUCCESS: get_connection(%s) failed as expected" % dsn 
1795                          t, v = sys.exc_info()[:2] 
1796                          print ' ', t 
1797                          print ' ', v 
1798  
1799                  dsn = 'dbname=gnumed_v9 user=any-doc password=abc' 
1800                  try: 
1801                          conn = get_connection(dsn=dsn) 
1802                  except cAuthenticationError: 
1803                          print "SUCCESS: get_connection(%s) failed as expected" % dsn 
1804                          t, v = sys.exc_info()[:2] 
1805                          print ' ', t 
1806                          print ' ', v 
1807  
1808                  dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 
1809                  conn = get_connection(dsn=dsn, readonly=True) 
1810  
1811                  dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 
1812                  conn = get_connection(dsn=dsn, readonly=False) 
1813  
1814                  dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 
1815                  encoding = 'foo' 
1816                  try: 
1817                          conn = get_connection(dsn=dsn, encoding=encoding) 
1818                  except cEncodingError: 
1819                          print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding) 
1820                          t, v = sys.exc_info()[:2] 
1821                          print ' ', t 
1822                          print ' ', v 
1823 #--------------------------------------------------------------------
1824          def test_exceptions(): 
1825                  print "testing exceptions" 
1826  
1827                  try: 
1828                          raise cAuthenticationError('no dsn', 'no previous exception') 
1829                  except cAuthenticationError: 
1830                          t, v, tb = sys.exc_info() 
1831                          print t 
1832                          print v 
1833                          print tb 
1834  
1835                  try: 
1836                          raise cEncodingError('no encoding', 'no previous exception') 
1837                  except cEncodingError: 
1838                          t, v, tb = sys.exc_info() 
1839                          print t 
1840                          print v 
1841                          print tb 
1842 #--------------------------------------------------------------------
1843          def test_ro_queries(): 
1844                  print "testing run_ro_queries()" 
1845  
1846                  dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 
1847                  conn = get_connection(dsn, readonly=True) 
1848  
1849                  data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True) 
1850                  print data 
1851                  print idx 
1852                  data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True) 
1853                  print data 
1854                  print idx 
1855  
1856                  curs = conn.cursor() 
1857  
1858                  data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True) 
1859                  print data 
1860                  print idx 
1861  
1862                  data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True, verbose=True) 
1863                  print data 
1864                  print idx 
1865  
1866                  try: 
1867                          data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True) 
1868                          print data 
1869                          print idx 
1870                  except psycopg2.ProgrammingError: 
1871                          print 'SUCCESS: run_ro_queries("selec 1") failed as expected' 
1872                          t, v = sys.exc_info()[:2] 
1873                          print ' ', t 
1874                          print ' ', v 
1875  
1876                  curs.close() 
1877 #--------------------------------------------------------------------
1878          def test_request_dsn(): 
1879                  conn = get_connection() 
1880                  print conn 
1881                  conn.close() 
1882 #--------------------------------------------------------------------
1883          def test_set_encoding(): 
1884                  print "testing set_default_client_encoding()" 
1885  
1886                  enc = 'foo' 
1887                  try: 
1888                          set_default_client_encoding(enc) 
1889                          print "SUCCESS: encoding [%s] worked" % enc 
1890                  except ValueError: 
1891                          print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 
1892                          t, v = sys.exc_info()[:2] 
1893                          print ' ', t 
1894                          print ' ', v 
1895  
1896                  enc = '' 
1897                  try: 
1898                          set_default_client_encoding(enc) 
1899                          print "SUCCESS: encoding [%s] worked" % enc 
1900                  except ValueError: 
1901                          print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 
1902                          t, v = sys.exc_info()[:2] 
1903                          print ' ', t 
1904                          print ' ', v 
1905  
1906                  enc = 'latin1' 
1907                  try: 
1908                          set_default_client_encoding(enc) 
1909                          print "SUCCESS: encoding [%s] worked" % enc 
1910                  except ValueError: 
1911                          print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 
1912                          t, v = sys.exc_info()[:2] 
1913                          print ' ', t 
1914                          print ' ', v 
1915  
1916                  enc = 'utf8' 
1917                  try: 
1918                          set_default_client_encoding(enc) 
1919                          print "SUCCESS: encoding [%s] worked" % enc 
1920                  except ValueError: 
1921                          print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 
1922                          t, v = sys.exc_info()[:2] 
1923                          print ' ', t 
1924                          print ' ', v 
1925  
1926                  enc = 'unicode' 
1927                  try: 
1928                          set_default_client_encoding(enc) 
1929                          print "SUCCESS: encoding [%s] worked" % enc 
1930                  except ValueError: 
1931                          print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 
1932                          t, v = sys.exc_info()[:2] 
1933                          print ' ', t 
1934                          print ' ', v 
1935  
1936                  enc = 'UNICODE' 
1937                  try: 
1938                          set_default_client_encoding(enc) 
1939                          print "SUCCESS: encoding [%s] worked" % enc 
1940                  except ValueError: 
1941                          print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 
1942                          t, v = sys.exc_info()[:2] 
1943                          print ' ', t 
1944                          print ' ', v 
1945 #--------------------------------------------------------------------
1946          def test_connection_pool(): 
1947                  dsn = get_default_dsn() 
1948                  pool = cConnectionPool(minconn=1, maxconn=2, dsn=dsn, verbose=False) 
1949                  print pool 
1950                  print pool.getconn() 
1951                  print pool.getconn() 
1952                  print pool.getconn() 
1953                  print type(pool.getconn()) 
1954 #--------------------------------------------------------------------
1955          def test_list_args(): 
1956                  dsn = get_default_dsn() 
1957                  conn = get_connection(dsn, readonly=True) 
1958                  curs = conn.cursor() 
1959                  curs.execute('select * from clin.clin_narrative where narrative = %s', ['a']) 
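# Because lists adapt to PG arrays (see the note above the __main__
# section), membership tests are written with "= any(...)" rather than
# "in (...)". A minimal sketch (values are illustrative):
#
#       curs.execute(u'select * from clin.clin_narrative where narrative = any(%s)', (['a', 'b'],))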
1960 #--------------------------------------------------------------------
1961          def test_sanitize_pg_regex(): 
1962                  tests = [ 
1963                          ['(', '\\('] 
1964                          , ['[', '\\['] 
1965                          , [')', '\\)'] 
1966                  ] 
1967                  for test in tests: 
1968                          result = sanitize_pg_regex(test[0]) 
1969                          if result != test[1]: 
1970                                  print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1]) 
1971 #--------------------------------------------------------------------
1972          def test_is_pg_interval(): 
1973                  status = True 
1974                  tests = [ 
1975                          [None, True],           # None == NULL == succeeds ! 
1976                          [1, True], 
1977                          ['1', True], 
1978                          ['abc', False] 
1979                  ] 
1980  
1981                  if not is_pg_interval(): 
1982                          print 'ERROR: is_pg_interval() returned "False", expected "True"' 
1983                          status = False 
1984  
1985                  for test in tests: 
1986                          result = is_pg_interval(test[0]) 
1987                          if result != test[1]: 
1988                                  print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]) 
1989                                  status = False 
1990  
1991                  return status 
1992 #--------------------------------------------------------------------
1993          def test_sanity_check_time_skew(): 
1994                  sanity_check_time_skew() 
1995 #--------------------------------------------------------------------
1996          def test_keyword_expansion(): 
1997                  print "keywords, from database:" 
1998                  print get_text_expansion_keywords() 
1999                  print "keywords, cached:" 
2000                  print get_text_expansion_keywords() 
2001                  print "'$dvt' expands to:" 
2002                  print expand_keyword(keyword = u'$dvt') 
2003 #--------------------------------------------------------------------
2004          def test_get_foreign_key_details(): 
2005                  for row in get_foreign_keys2column( 
2006                          schema = u'dem', 
2007                          table = u'identity', 
2008                          column = u'pk' 
2009                  ): 
2010                          print '%s.%s references %s.%s.%s' % ( 
2011                                  row['referencing_table'], 
2012                                  row['referencing_column'], 
2013                                  row['referenced_schema'], 
2014                                  row['referenced_table'], 
2015                                  row['referenced_column'] 
2016                          ) 
2017 #--------------------------------------------------------------------
2018          def test_set_user_language(): 
2019                  # (user, language, expected result); expected None means: expect an exception 
2020                  tests = [ 
2021                          # current user 
2022                          [None, 'de_DE', True], 
2023                          [None, 'lang_w/o_tx', False], 
2024                          [None, None, True], 
2025                          # valid user 
2026                          ['any-doc', 'de_DE', True], 
2027                          ['any-doc', 'lang_w/o_tx', False], 
2028                          ['any-doc', None, True], 
2029                          # invalid user 
2030                          ['invalid user', 'de_DE', None], 
2031                          ['invalid user', 'lang_w/o_tx', False],         # language checking happens before user checking 
2032                          ['invalid user', None, True] 
2033                  ] 
2034                  for test in tests: 
2035                          try: 
2036                                  result = set_user_language(user = test[0], language = test[1]) 
2037                                  if result != test[2]: 
2038                                          print "test:", test 
2039                                          print "result:", result, "expected:", test[2] 
2040                          except psycopg2.IntegrityError, e: 
2041                                  if test[2] is None: 
2042                                          continue 
2043                                  print "test:", test 
2044                                  print "unexpected exception" 
2045                                  print "result:", e 
2046 #--------------------------------------------------------------------
2047          def test_get_schema_revision_history(): 
2048                  for line in get_schema_revision_history(): 
2049                          print u' - '.join(line) 
2050 #--------------------------------------------------------------------
2051          def test_run_query(): 
2052                  gmDateTime.init() 
2053                  args = {'dt': gmDateTime.pydt_max_here()} 
2054                  cmd = u"select %(dt)s" 
2055  
2056                  #cmd = u"select 'infinity'::timestamp with time zone" 
2057                  rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False) 
2058                  print rows 
2059          #-------------------------------------------------------------------- 
2060          # run tests 
2061          #test_file2bytea() 
2062          #test_get_connection() 
2063          #test_exceptions() 
2064          #test_ro_queries() 
2065          #test_request_dsn() 
2066          #test_set_encoding() 
2067          #test_connection_pool() 
2068          #test_list_args() 
2069          #test_sanitize_pg_regex() 
2070          #test_is_pg_interval() 
2071          #test_sanity_check_time_skew() 
2072          #test_keyword_expansion() 
2073          #test_get_foreign_key_details() 
2074          #test_set_user_language() 
2075          #test_get_schema_revision_history() 
2076          test_run_query() 
2077  
2078  # ====================================================================== 
2079  