
Source Code for Module Gnumed.pycommon.gmPG2

"""GNUmed PostgreSQL connection handling.

TODO: iterator/generator batch fetching:
	- http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad
	- search Google for "Generator/Iterator Nesting Problem - Any Ideas? 2.4"

winner:
def resultset_functional_batchgenerator(cursor, size=100):
	for results in iter(lambda: cursor.fetchmany(size), []):
		for rec in results:
			yield rec
"""
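# Illustrative usage sketch for the batch generator above (the cursor,
# query, and process() are placeholders, not part of this module):
#
#	curs.execute(u'select * from some_large_table')
#	for rec in resultset_functional_batchgenerator(curs, size=500):
#		process(rec)
#
# fetchmany(size) transfers rows in batches of <size>, so client memory
# use stays bounded no matter how many rows the query returns.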
# =======================================================================
__author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>"
__license__ = 'GPL v2 or later (details at http://www.gnu.org)'

# stdlib
import time
import sys
import os
import codecs
import types
import logging
import datetime as pydt
import re as regex


# GNUmed
if __name__ == '__main__':
	sys.path.insert(0, '../../')
from Gnumed.pycommon import gmLoginInfo
from Gnumed.pycommon import gmExceptions
from Gnumed.pycommon import gmDateTime
from Gnumed.pycommon import gmBorg
from Gnumed.pycommon import gmI18N
from Gnumed.pycommon import gmLog2
from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character

_log = logging.getLogger('gm.db')


# 3rd party
try:
	import psycopg2 as dbapi
except ImportError:
	_log.exception("Python database adapter psycopg2 not found.")
	print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server."
	raise


_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
if not (float(dbapi.apilevel) >= 2.0):
	raise ImportError('gmPG2: supported DB-API level too low')
if not (dbapi.threadsafety > 0):
	raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
if not (dbapi.paramstyle == 'pyformat'):
	raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
try:
	dbapi.__version__.index('dt')
except ValueError:
	raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
	dbapi.__version__.index('ext')
except ValueError:
	raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
	dbapi.__version__.index('pq3')
except ValueError:
	raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')

import psycopg2.extras
import psycopg2.extensions
import psycopg2.pool
import psycopg2.errorcodes as sql_error_codes

# =======================================================================
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# things timezone
_default_client_timezone = None			# default time zone for connections
_sql_set_timezone = None
_timestamp_template = "cast('%s' as timestamp with time zone)"		# MUST NOT be unicode or else getquoted will not work
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

_default_dsn = None
_default_login = None

postgresql_version_string = None
postgresql_version = None			# accuracy: major.minor

__ro_conn_pool = None

auto_request_login_params = True
# =======================================================================
# global data
# =======================================================================

known_schema_hashes = {
	0: 'not released, testing only',
	2: 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	3: 'e73718eaf230d8f1d2d01afa8462e176',
	4: '4428ccf2e54c289136819e701bb095ea',
	5: '7e7b093af57aea48c288e76632a382e5',	# ... old (v1) style hashes
	6: '90e2026ac2efd236da9c8608b8685b2d',	# new (v2) style hashes ...
	7: '6c9f6d3981483f8e9433df99d1947b27',
	8: '89b13a7af83337c3aad153b717e52360',
	9: '641a9b2be3c378ffc2bb2f0b1c9f051d',
	10: '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	11: '03042ae24f3f92877d986fb0a6184d76',
	12: '06183a6616db62257e22814007a8ed07',
	13: 'fab7c1ae408a6530c47f9b5111a0841e',
	14: 'e170d543f067d1ea60bfe9076b1560cf',
	15: '70012ff960b77ecdff4981c94b5b55b6',
	16: '0bcf44ca22c479b52976e5eda1de8161',
	17: '161428ee97a00e3bf56168c3a15b7b50'
}

map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 2,
	'e73718eaf230d8f1d2d01afa8462e176': 3,
	'4428ccf2e54c289136819e701bb095ea': 4,
	'7e7b093af57aea48c288e76632a382e5': 5,
	'90e2026ac2efd236da9c8608b8685b2d': 6,
	'6c9f6d3981483f8e9433df99d1947b27': 7,
	'89b13a7af83337c3aad153b717e52360': 8,
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 9,
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 10,
	'03042ae24f3f92877d986fb0a6184d76': 11,
	'06183a6616db62257e22814007a8ed07': 12,
	'fab7c1ae408a6530c47f9b5111a0841e': 13,
	'e170d543f067d1ea60bfe9076b1560cf': 14,
	'70012ff960b77ecdff4981c94b5b55b6': 15,
	'0bcf44ca22c479b52976e5eda1de8161': 16,
	'161428ee97a00e3bf56168c3a15b7b50': 17
}

map_client_branch2required_db_version = {
	u'GIT tree': 0,
	u'0.3': 9,
	u'0.4': 10,
	u'0.5': 11,
	u'0.6': 12,
	u'0.7': 13,
	u'0.8': 14,
	u'0.9': 15,
	u'1.0': 16,		# intentional duplicate with 1.1
	u'1.1': 16,
	u'1.2': 17,
	u'1.3': 18
}

# get columns and data types for a given table
query_table_col_defs = u"""select
	cols.column_name,
	cols.udt_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

query_table_attributes = u"""select
	cols.column_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

# =======================================================================
# module globals API
# =======================================================================
def set_default_client_encoding(encoding = None):
	# check whether psycopg2 can handle this encoding
	if encoding not in psycopg2.extensions.encodings:
		raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
	# check whether Python can handle this encoding
	py_enc = psycopg2.extensions.encodings[encoding]
	try:
		codecs.lookup(py_enc)
	except LookupError:
		_log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
		raise
	# FIXME: check encoding against the database
	# FIXME: - but we may not yet have access
	# FIXME: - psycopg2 will pull its encodings from the database eventually
	# it seems safe to set it
	global _default_client_encoding
	_log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
	_default_client_encoding = encoding
	return True
#---------------------------------------------------
def set_default_client_timezone(timezone = None):

	# FIXME: use __validate
	global _default_client_timezone
	_log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
	_default_client_timezone = timezone

	global _sql_set_timezone
	_sql_set_timezone = u'set timezone to %s'

	return True
#---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):

	_log.debug(u'validating time zone [%s]', timezone)

	cmd = u'set timezone to %(tz)s'
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()
	is_valid = False
	try:
		curs.execute(cmd, args)
		_log.info(u'time zone [%s] is settable', timezone)
		# can we actually use it, though ?
		cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
		try:
			curs.execute(cmd)
			curs.fetchone()
			_log.info(u'time zone [%s] is usable', timezone)
			is_valid = True
		except:
			_log.error('error using time zone [%s]', timezone)
	except dbapi.DataError:
		_log.warning(u'time zone [%s] is not settable', timezone)
	except:
		_log.error(u'failed to set time zone to [%s]', timezone)
		_log.exception(u'')

	curs.close()
	conn.rollback()

	return is_valid
#---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
	"""Some timezone definitions are abbreviations, so try to expand
	them, because "set time zone" doesn't take abbreviations."""

	cmd = u"""
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()

	result = timezone
	try:
		curs.execute(cmd, args)
		rows = curs.fetchall()
		if len(rows) > 0:
			result = rows[0][0]
			_log.debug(u'[%s] maps to [%s]', timezone, result)
	except:
		_log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

	curs.close()
	conn.rollback()

	return result
#---------------------------------------------------
def __detect_client_timezone(conn=None):
	"""This is run on the very first connection."""

	# FIXME: check whether server.timezone is the same
	# FIXME: value as what we eventually detect

	# we need gmDateTime to be initialized
	if gmDateTime.current_local_iso_numeric_timezone_string is None:
		gmDateTime.init()

	_log.debug('trying to detect timezone from system')

	tz_candidates = []
	try:
		tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
		tz_candidates.append(tz)
		expanded = __expand_timezone(conn = conn, timezone = tz)
		if expanded != tz:
			tz_candidates.append(expanded)
	except KeyError:
		pass

	tz_candidates.append(gmDateTime.current_local_timezone_name)
	expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
	if expanded != gmDateTime.current_local_timezone_name:
		tz_candidates.append(expanded)

	_log.debug('candidates: %s', str(tz_candidates))

	# find best among candidates
	global _default_client_timezone
	global _sql_set_timezone
	found = False
	for tz in tz_candidates:
		if __validate_timezone(conn = conn, timezone = tz):
			_default_client_timezone = tz
			_sql_set_timezone = u'set timezone to %s'
			found = True
			break

	if not found:
		_default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
		_sql_set_timezone = u"set time zone interval %s hour to minute"

	_log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
# =======================================================================
# login API
# =======================================================================
def __request_login_params_tui():
	"""Text mode request of database login parameters"""
	import getpass
	login = gmLoginInfo.LoginInfo()

	print "\nPlease enter the required login parameters:"
	try:
		login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
		login.database = prompted_input(prompt = "database", default = 'gnumed_v18')
		login.user = prompted_input(prompt = "user name", default = '')
		tmp = 'password for "%s" (not shown): ' % login.user
		login.password = getpass.getpass(tmp)
		login.port = prompted_input(prompt = "port", default = 5432)
	except KeyboardInterrupt:
		_log.warning("user cancelled text mode login dialog")
		print "user cancelled text mode login dialog"
		raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

	return login
#---------------------------------------------------
def __request_login_params_gui_wx():
	"""GUI (wx) input request for database login parameters.

	Returns gmLoginInfo.LoginInfo object
	"""
	import wx
	# OK, wxPython was already loaded. But has the main Application instance
	# been initialized yet ? if not, the exception will kick us out
	if wx.GetApp() is None:
		raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

	# Let's launch the login dialog
	# if wx was not initialized / no main App loop, an exception should be raised anyway
	import gmAuthWidgets
	dlg = gmAuthWidgets.cLoginDialog(None, -1)
	dlg.ShowModal()
	login = dlg.panel.GetLoginInfo()
	dlg.Destroy()

	# if user cancelled or something else went wrong, raise an exception
	if login is None:
		raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

	return login
#---------------------------------------------------
def request_login_params():
	"""Request login parameters for database connection."""
	# do we auto-request parameters at all ?
	if not auto_request_login_params:
		raise Exception('Cannot request login parameters.')

	# are we inside X ?
	# (if we aren't, wxGTK will crash hard at
	#  C-level with "can't open Display")
	if os.environ.has_key('DISPLAY'):
		# try wxPython GUI
		try: return __request_login_params_gui_wx()
		except: pass

	# well, either we are on the console or
	# wxPython does not work, use text mode
	return __request_login_params_tui()

# =======================================================================
# DSN API
# -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	dsn_parts = []

	if (database is not None) and (database.strip() != ''):
		dsn_parts.append('dbname=%s' % database)

	if (host is not None) and (host.strip() != ''):
		dsn_parts.append('host=%s' % host)

	if (port is not None) and (str(port).strip() != ''):
		dsn_parts.append('port=%s' % port)

	if (user is not None) and (user.strip() != ''):
		dsn_parts.append('user=%s' % user)

	if (password is not None) and (password.strip() != ''):
		dsn_parts.append('password=%s' % password)

	dsn_parts.append('sslmode=prefer')

	return ' '.join(dsn_parts)
# ------------------------------------------------------
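# Example (a sketch, values made up): the parts are assembled into a
# libpq-style keyword/value DSN; empty or None parts are simply left
# out, so libpq falls back to its defaults:
#
#	make_psycopg2_dsn(database = 'gnumed_v18', host = 'localhost', user = 'any-doc')
#	-> 'dbname=gnumed_v18 host=localhost port=5432 user=any-doc sslmode=prefer'
# ------------------------------------------------------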
def get_default_login():
	# make sure we do have a login
	get_default_dsn()
	return _default_login
# ------------------------------------------------------
def get_default_dsn():
	global _default_dsn
	if _default_dsn is not None:
		return _default_dsn

	login = request_login_params()
	set_default_login(login=login)

	return _default_dsn
# ------------------------------------------------------
def set_default_login(login=None):
	if login is None:
		return False

	if login.host is not None:
		if login.host.strip() == u'':
			login.host = None

	global _default_login
	# log BEFORE overwriting so old and new values both show up
	_log.info('setting default login from [%s] to [%s]' % (_default_login, login))
	_default_login = login

	dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

	global _default_dsn
	if _default_dsn is None:
		old_dsn = u'None'
	else:
		old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
	_log.info('setting default DSN from [%s] to [%s]',
		old_dsn,
		regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
	)
	_default_dsn = dsn

	return True

# =======================================================================
# metadata API
# =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
	expected_hash = known_schema_hashes[version]
	if version == 0:
		args = {'ver': 9999}
	else:
		args = {'ver': version}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
			'args': args
		}]
	)
	if rows[0]['md5'] != expected_hash:
		_log.error('database schema version mismatch')
		_log.error('expected: %s (%s)' % (version, expected_hash))
		_log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
		if verbose:
			_log.debug('schema dump follows:')
			for line in get_schema_structure(link_obj=link_obj).split():
				_log.debug(line)
			_log.debug('schema revision history dump follows:')
			for line in get_schema_revision_history(link_obj=link_obj):
				_log.debug(u' - '.join(line))
		return False
	_log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
	return True
#------------------------------------------------------------------------
def get_schema_version(link_obj=None):
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	try:
		return map_schema_hash2version[rows[0]['md5']]
	except KeyError:
		return u'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']
#------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
	return rows[0][0]
#------------------------------------------------------------------------
def get_schema_hash(link_obj=None):
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	return rows[0]['md5']
#------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):
	cmd = u"""
select
	imported::text,
	version,
	filename
from gm.schema_revision
order by imported
"""
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': cmd}])
	return rows
#------------------------------------------------------------------------
def get_current_user():
	rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
	return rows[0][0]
#------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
	"""Get the foreign keys pointing to schema.table.column.

	Does not properly work with multi-column FKs.
	GNUmed doesn't use any, however.
	"""
	cmd = u"""
select
	%(schema)s as referenced_schema,
	%(tbl)s as referenced_table,
	%(col)s as referenced_column,
	pgc.confkey as referenced_column_list,
	pgc.conrelid::regclass as referencing_table,
	pgc.conkey as referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
	pg_constraint pgc
where
	pgc.contype = 'f'
		and
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""

	args = {
		'schema': schema,
		'tbl': table,
		'col': column
	}

	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [
			{'cmd': cmd, 'args': args}
		]
	)

	return rows
#------------------------------------------------------------------------
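# Example call (a sketch, schema/table/column made up; the row keys are
# the aliases defined in the query above):
#
#	rows = get_foreign_keys2column(schema = u'clin', table = u'episode', column = u'pk')
#	for row in rows:
#		print '%s.%s references it' % (row['referencing_table'], row['referencing_column'])
#------------------------------------------------------------------------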
def get_child_tables(schema='public', table=None, link_obj=None):
	"""Return child tables of <table>."""
	cmd = u"""
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
	return rows
#------------------------------------------------------------------------
def schema_exists(link_obj=None, schema=u'gm'):
	cmd = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
	args = {'schema': schema}
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
	return rows[0][0]
#------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
	"""Returns True if the table exists, False otherwise."""
	cmd = u"""
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
	return rows[0][0]
#------------------------------------------------------------------------
def get_col_indices(cursor = None):
	if cursor.description is None:
		_log.error('no result description available: unused cursor or last query did not select rows')
		return None
	col_indices = {}
	col_index = 0
	for col_desc in cursor.description:
		col_name = col_desc[0]
		# a query like "select 1,2;" will return two columns of the same name !
		# hence adjust to that, note, however, that dict-style access won't work
		# on results of such queries ...
		if col_indices.has_key(col_name):
			col_name = '%s_%s' % (col_name, col_index)
		col_indices[col_name] = col_index
		col_index += 1

	return col_indices
#------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	col_names = []
	col_type = {}
	for row in rows:
		col_names.append(row[0])
		# map array types
		if row[1].startswith('_'):
			col_type[row[0]] = row[1][1:] + '[]'
		else:
			col_type[row[0]] = row[1]
	col_defs = []
	col_defs.append(col_names)
	col_defs.append(col_type)
	return col_defs
#------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
	"""Return column attributes of table"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
	cols = []
	for row in rows:
		cols.append(row[0])
	return cols

#------------------------------------------------------------------------
# i18n functions
#------------------------------------------------------------------------
def export_translations_from_database(filename=None):
	tx_file = codecs.open(filename, 'wb', 'utf8')
	tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
	tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
	tx_file.write(u'-- - user language is set to [%s]\n\n' % get_current_user_language())
	tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
	tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
	tx_file.write(u'set default_transaction_read_only to off;\n\n')
	tx_file.write(u"set client_encoding to 'utf-8';\n\n")
	tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

	cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	for row in rows:
		line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
			row['lang'].replace("'", "\\'"),
			row['orig'].replace("'", "\\'"),
			row['trans'].replace("'", "\\'")
		)
		tx_file.write(line)
	tx_file.write(u'\n')

	tx_file.write(u'\\set ON_ERROR_STOP 1\n')
	tx_file.close()

	return True
#------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
	cmd = u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
	args = {'lang': language, 'orig': original}
	run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
	return True

#------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
	cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
	args = {'lang': language, 'orig': original, 'trans': translation}
	run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False)
	return args

#------------------------------------------------------------------------
def get_translation_languages():
	rows, idx = run_ro_queries (
		queries = [{'cmd': u'select distinct lang from i18n.translations'}]
	)
	return [ r[0] for r in rows ]

#------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):

	args = {'lang': language}
	_log.debug('language [%s]', language)

	# default to a sane ordering if none was requested
	if order_by is None:
		order_by = u'ORDER BY lang, orig'
	else:
		order_by = u'ORDER BY %s' % order_by

	if language is None:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		NULL as lang,
		ik.orig,
		NULL AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		it.lang,
		it.orig,
		it.trans
	FROM
		i18n.translations it

)) as translatable_strings
%s""" % order_by
	else:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		%%(lang)s as lang,
		ik.orig,
		i18n._(ik.orig, %%(lang)s) AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		%%(lang)s as lang,
		it.orig,
		i18n._(it.orig, %%(lang)s) AS trans
	FROM
		i18n.translations it

)) AS translatable_strings
%s""" % order_by

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

	if rows is None:
		_log.error('no translatable strings found')
	else:
		_log.debug('%s translatable strings found', len(rows))

	return rows

#------------------------------------------------------------------------
def get_current_user_language():
	cmd = u'select i18n.get_curr_lang()'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
	return rows[0][0]

#------------------------------------------------------------------------
def set_user_language(user=None, language=None):
	"""Set the user language in the database.

	user = None: current db user
	language = None: unset
	"""
	_log.info('setting database language for user [%s] to [%s]', user, language)

	args = {
		'usr': user,
		'lang': language
	}

	if language is None:
		if user is None:
			queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
		else:
			queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
		queries.append({'cmd': u'select True'})
	else:
		if user is None:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
		else:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

	rows, idx = run_rw_queries(queries = queries, return_data = True)

	if not rows[0][0]:
		_log.error('cannot set database language to [%s] for user [%s]', language, user)

	return rows[0][0]
#------------------------------------------------------------------------
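# Example calls (a sketch, user/language values made up):
#
#	set_user_language(language = u'de_DE')			# current user, set to German
#	set_user_language(user = u'any-doc', language = u'es')	# named user, set to Spanish
#	set_user_language()					# current user, unset
#------------------------------------------------------------------------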
def force_user_language(language=None):
	"""Set the user language in the database.

	- regardless of whether there is any translation available.
	- only for the current user
	"""
	_log.info('forcing database language for current db user to [%s]', language)

	run_rw_queries(queries = [{
		'cmd': u'select i18n.force_curr_lang(%(lang)s)',
		'args': {'lang': language}
	}])

# =======================================================================
# query runners and helpers
# =======================================================================
def send_maintenance_notification():
	cmd = u'notify "db_maintenance_warning:"'
	run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
#------------------------------------------------------------------------
def send_maintenance_shutdown():
	cmd = u'notify "db_maintenance_disconnect:"'
	run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
#------------------------------------------------------------------------
def is_pg_interval(candidate=None):
	cmd = u'SELECT %(candidate)s::interval'
	try:
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
		return True
	except:
		cmd = u'SELECT %(candidate)s::text::interval'
		try:
			rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
			return True
		except:
			return False
#------------------------------------------------------------------------
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
	outfile = file(filename, 'wb')
	result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
	outfile.close()
	return result
#------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store data from a bytea field into a file.

	<data_query>
	- dict {'cmd': ..., 'args': ...}
	- 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
	- 'args' must be a dict
	- must return one row with one field of type bytea
	<file>
	- must be a file like Python object
	<data_size>
	- integer of the total size of the expected data or None
	<data_size_query>
	- dict {'cmd': ..., 'args': ...}
	- cmd must be unicode
	- must return one row with one field with the octet_length() of the data field
	- used only when <data_size> is None
	"""
	if data_size == 0:
		return True

	# If the client sets an encoding other than the default we
	# will receive encoding-parsed data which isn't the binary
	# content we want. Hence we need to get our own connection.
	# It must be a read-write one so that we don't affect the
	# encoding for other users of the shared read-only
	# connections.
	# Actually, encodings shouldn't be applied to binary data
	# (eg. bytea types) in the first place but that is only
	# reported to be fixed > v7.4.
	# Further tests reveal that at least on PG 8.0 this bug still
	# manifests itself.
	conn = get_raw_connection(readonly=True)

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
	if data_size in [None, 0]:
		conn.rollback()
		return True

	max_chunk_size = 1024 * 1024 * 20		# 20 MB, works for typical CR DICOMs
	if chunk_size == 0:
		chunk_size = min(data_size, max_chunk_size)

	_log.debug('expecting bytea data of size: [%s] bytes' % data_size)
	_log.debug('using chunk size of: [%s] bytes' % chunk_size)

	# Windoze sucks: it can't transfer objects of arbitrary size,
	# anyways, we need to split the transfer,
	# however, only possible if postgres >= 7.2
	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('chunks to retrieve: [%s]' % needed_chunks)
	_log.debug('remainder to retrieve: [%s] bytes' % remainder)

	# try setting "bytea_output"
	# - fails if not necessary
	# - succeeds if necessary
	try:
		run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
	except dbapi.ProgrammingError:
		_log.debug('failed to set bytea_output to "escape", not necessary')

	# retrieve chunks, skipped if data size < chunk size,
	# does this not carry the danger of cutting up multi-byte escape sequences ?
	# no, since bytea is binary,
	# yes, since in bytea there are *some* escaped values, still
	# no, since those are only escaped during *transfer*, not on-disk, hence
	# only complete escape sequences are put on the wire
	for chunk_id in range(needed_chunks):
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# retrieve remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	conn.rollback()
	return True
#------------------------------------------------------------------------
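# Example (a sketch, table/column/pk made up) of queries matching the
# docstring requirements above:
#
#	bytea2file (
#		data_query = {
#			'cmd': u'select substring(data from %(start)s for %(size)s) from blobs.doc_obj where pk = %(pk)s',
#			'args': {'pk': 1234}
#		},
#		filename = '/tmp/doc.dcm',
#		data_size_query = {
#			'cmd': u'select octet_length(data) from blobs.doc_obj where pk = %(pk)s',
#			'args': {'pk': 1234}
#		}
#	)
#------------------------------------------------------------------------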
def file2bytea(query=None, filename=None, args=None, conn=None):
	"""Store data from a file into a bytea field.

	The query must:
	- be in unicode
	- contain a format spec identifying the row (eg a primary key)
	  matching <args> if it is an UPDATE
	- contain a format spec %(data)s::bytea
	"""
	# read data from file
	infile = file(filename, "rb")
	data_as_byte_string = infile.read()
	infile.close()
	if args is None:
		args = {}
	args['data'] = buffer(data_as_byte_string)
	del(data_as_byte_string)

	# insert the data
	if conn is None:
		conn = get_raw_connection(readonly=False)
		close_conn = True
	else:
		close_conn = False

	run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)

	if close_conn:
		conn.close()

	return
#------------------------------------------------------------------------
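# Example (a sketch, table/pk made up): the query carries the
# %(data)s::bytea placeholder which gets filled from the file content:
#
#	file2bytea (
#		query = u'update blobs.doc_obj set data = %(data)s::bytea where pk = %(pk)s',
#		filename = '/tmp/scan.png',
#		args = {'pk': 1234}
#	)
#------------------------------------------------------------------------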
def sanitize_pg_regex(expression=None, escape_all=False):
	"""Escape input for use in a PostgreSQL regular expression.

	If a fragment comes from user input and is to be used
	as a regular expression we need to make sure it doesn't
	contain invalid regex patterns such as unbalanced ('s.

	<escape_all>
		True: try to escape *all* metacharacters
		False: only escape those which render the regex invalid
	"""
	return expression.replace (
			'(', '\('
		).replace (
			')', '\)'
		).replace (
			'[', '\['
		).replace (
			'+', '\+'
		).replace (
			'.', '\.'
		).replace (
			'*', '\*'
		)
		#']', '\]',			# not needed
#------------------------------------------------------------------------
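# Example (a sketch): an unbalanced parenthesis from user input no
# longer invalidates the regex:
#
#	sanitize_pg_regex(expression = u'pain (left arm')
#	-> u'pain \\(left arm'
#------------------------------------------------------------------------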
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
	"""Run read-only queries.

	<queries> must be a list of dicts:
		[
			{'cmd': <string>, 'args': <dict> or <tuple>},
			{...},
			...
		]
	"""
	if isinstance(link_obj, dbapi._psycopg.cursor):
		curs = link_obj
		curs_close = __noop
		tx_rollback = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		curs = link_obj.cursor()
		curs_close = curs.close
		tx_rollback = link_obj.rollback
	elif link_obj is None:
		conn = get_connection(readonly=True, verbose=verbose)
		curs = conn.cursor()
		curs_close = curs.close
		tx_rollback = conn.rollback
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	if verbose:
		_log.debug('cursor: %s', curs)

	for query in queries:
		if type(query['cmd']) is not types.UnicodeType:
			print "run_ro_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug('ran query: [%s]', curs.query)
				if curs.statusmessage != u'':
					_log.debug('PG status message: %s', curs.statusmessage)
				_log.debug('cursor description: %s', str(curs.description))
		except dbapi.Error as pg_exc:
			_log.error('query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			_log.error('PG error code: %s', pg_exc.pgcode)
			_log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()		# need to rollback so ABORT state isn't preserved in pooled conns
			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
				if curs.statusmessage != u'':
					details = u'Status: %s\n%s' % (
						curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
						details
					)
				raise gmExceptions.AccessDenied (
					u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n')),
					source = u'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)
			raise
		except:
			_log.error('query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()		# need to rollback so ABORT state isn't preserved in pooled conns
			raise

	data = None
	col_idx = None
	if return_data:
		data = curs.fetchall()
		if verbose:
			_log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
			_log.debug('cursor description: %s', str(curs.description))
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	tx_rollback()		# rollback just so that we don't stay IDLE IN TRANSACTION forever
	return (data, col_idx)
#------------------------------------------------------------------------
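# Usage sketch (table/column names made up): run one parameterized
# read-only query on an implicit pooled connection and index the result
# columns by name:
#
#	rows, idx = run_ro_queries (
#		queries = [{
#			'cmd': u'select pk, description from some_table where pk = %(pk)s',
#			'args': {'pk': 12}
#		}],
#		get_col_idx = True
#	)
#	print rows[0][idx['description']]
#------------------------------------------------------------------------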
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
	"""Convenience function for running a transaction
	that is supposed to get committed.

	<link_obj>
		can be either:
		- a cursor
		- a connection

	<queries>
		is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>}]
		to be executed as a single transaction, the last
		query may usefully return rows (such as a
		"SELECT currval('some_sequence')" statement)

	<end_tx>
	- controls whether the transaction is finalized (eg.
	  committed/rolled back) or not, this allows the
	  call to run_rw_queries() to be part of a framing
	  transaction
	- if link_obj is a connection then <end_tx> will
	  default to False unless it is explicitly set to
	  True which is taken to mean "yes, you do have full
	  control over the transaction" in which case the
	  transaction is properly finalized
	- if link_obj is a cursor we CANNOT finalize the
	  transaction because we would need the connection for that
	- if link_obj is None <end_tx> will, of course, always be True

	<return_data>
	- if true, the returned data will include the rows
	  the last query selected
	- if false, it returns None instead

	<get_col_idx>
	- if true, the returned data will include a dictionary
	  mapping field names to column positions
	- if false, the returned data returns None instead

	method result:
	- returns a tuple (data, idx)
	- <data>:
		* (None, None) if last query did not return rows
		* ("fetchall() result", <index>) if last query returned any rows
		* for <index> see <get_col_idx>
	"""
	if isinstance(link_obj, dbapi._psycopg.cursor):
		conn_close = __noop
		conn_commit = __noop
		tx_rollback = __noop
		curs = link_obj
		curs_close = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		conn_close = __noop
		if end_tx:
			conn_commit = link_obj.commit
			tx_rollback = link_obj.rollback
		else:
			conn_commit = __noop
			tx_rollback = __noop
		curs = link_obj.cursor()
		curs_close = curs.close
	elif link_obj is None:
		conn = get_connection(readonly=False)
		conn_close = conn.close
		conn_commit = conn.commit
		tx_rollback = conn.rollback
		curs = conn.cursor()
		curs_close = curs.close
	else:
		raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

	for query in queries:
		if type(query['cmd']) is not types.UnicodeType:
			print "run_rw_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
		except dbapi.Error as pg_exc:
			_log.error('RW query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			_log.error('PG error code: %s', pg_exc.pgcode)
			_log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
			try:
				curs_close()
				tx_rollback()		# just for good measure
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
				if curs.statusmessage != u'':
					details = u'Status: %s\n%s' % (
						curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
						details
					)
				raise gmExceptions.AccessDenied (
					u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n')),
					source = u'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)
			raise
		except:
			_log.exception('error running RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise

	data = None
	col_idx = None
	if return_data:
		try:
			data = curs.fetchall()
		except:
			_log.exception('error fetching data from RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	conn_commit()
	conn_close()

	return (data, col_idx)
#------------------------------------------------------------------------
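# Usage sketch (table/columns made up): several statements as one
# transaction, the last one returning data:
#
#	rows, idx = run_rw_queries (
#		queries = [
#			{'cmd': u'insert into some_table (comment) values (%(cmt)s)', 'args': {'cmt': u'test'}},
#			{'cmd': u"select currval(pg_get_serial_sequence('some_table', 'pk'))"}
#		],
#		return_data = True
#	)
#------------------------------------------------------------------------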
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
	"""Generates SQL for an INSERT query.

	values: dict of values keyed by field to insert them into
	"""
	if schema is None:
		schema = u'public'

	fields = values.keys()		# that way val_snippets and fields really should end up in the same order
	val_snippets = []
	for field in fields:
		val_snippets.append(u'%%(%s)s' % field)

	if returning is None:
		returning = u''
		return_data = False
	else:
		returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
		return_data = True

	cmd = u"""\nINSERT INTO %s.%s (
		%s
	) VALUES (
		%s
	)%s""" % (
		schema,
		table,
		u',\n\t\t'.join(fields),
		u',\n\t\t'.join(val_snippets),
		returning
	)

	_log.debug(u'running SQL: >>>%s<<<', cmd)

	return run_rw_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': values}],
		end_tx = end_tx,
		return_data = return_data,
		get_col_idx = get_col_idx,
		verbose = verbose
	)
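# Usage sketch (the RETURNING column is an assumption): this generates
# and runs SQL equivalent to
#	INSERT INTO gm.access_log (user_action) VALUES (%(user_action)s) RETURNING pk
#
#	rows, idx = run_insert (
#		schema = u'gm',
#		table = u'access_log',
#		values = {u'user_action': u'test'},
#		returning = [u'pk'],
#		end_tx = True
#	)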
# =======================================================================
# connection handling API
# -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
	"""GNUmed database connection pool.

	Extends psycopg2's PersistentConnectionPool with
	a custom _connect() function. Supports one connection
	per thread - which also ties it to one particular DSN.
	"""
	#--------------------------------------------------
	def _connect(self, key=None):

		conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)

		conn.original_close = conn.close
		conn.close = _raise_exception_on_ro_conn_close

		if key is not None:
			self._used[key] = conn
			self._rused[id(conn)] = key
		else:
			self._pool.append(conn)

		return conn
	#--------------------------------------------------
	def shutdown(self):
		for conn_key in self._used.keys():
			_log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
			self._used[conn_key].original_close()
# -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
	"""Get a raw, unadorned connection.

	- this will not set any parameters such as encoding, timezone, datestyle
	- the only requirement is a valid DSN
	- hence it can be used for "service" connections
	  for verifying encodings etc
	"""
	# FIXME: support verbose
	if dsn is None:
		dsn = get_default_dsn()

	if u'host=salaam.homeunix' in dsn:
		raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

	try:
		conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
	except dbapi.OperationalError, e:

		t, v, tb = sys.exc_info()
		try:
			msg = e.args[0]
		except (AttributeError, IndexError, TypeError):
			raise

		msg = unicode(msg, gmI18N.get_encoding(), 'replace')

		if msg.find('fe_sendauth') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		if regex.search('user ".*" does not exist', msg) is not None:
			raise cAuthenticationError, (dsn, msg), tb

		if msg.find('uthenti') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		raise

	_log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

	# do first-time stuff
	global postgresql_version
	if postgresql_version is None:
		curs = conn.cursor()
		curs.execute("""
			SELECT
				substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
			FROM
				pg_settings
			WHERE
				name = 'server_version'
		""")
		postgresql_version = curs.fetchone()['version']
		_log.info('PostgreSQL version (numeric): %s' % postgresql_version)
		try:
			curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
			_log.info('database size: %s', curs.fetchone()[0])
		except:
			pass
		if verbose:
			__log_PG_settings(curs=curs)
		curs.close()
		conn.commit()

	if _default_client_timezone is None:
		__detect_client_timezone(conn = conn)

	curs = conn.cursor()

	# set access mode
	if readonly:
		_log.debug('access mode [READ ONLY]')
		cmd = 'set session characteristics as transaction READ ONLY'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to on'
		curs.execute(cmd)
	else:
		_log.debug('access mode [READ WRITE]')
		cmd = 'set session characteristics as transaction READ WRITE'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to off'
		curs.execute(cmd)

	curs.close()
	conn.commit()

	conn.is_decorated = False

	return conn
# =======================================================================
def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
	"""Get a new connection.

	This assumes the locale system has been initialized
	unless an encoding is specified.
	"""
	# FIXME: support pooled on RW, too
	# FIXME: for now, support the default DSN only
	if pooled and readonly and (dsn is None):
		global __ro_conn_pool
		if __ro_conn_pool is None:
			__ro_conn_pool = cConnectionPool (
				minconn = 1,
				maxconn = 2,
				dsn = dsn,
				verbose = verbose
			)
		conn = __ro_conn_pool.getconn()
	else:
		conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)

	if conn.is_decorated:
		return conn

	if encoding is None:
		encoding = _default_client_encoding
	if encoding is None:
		encoding = gmI18N.get_encoding()
		_log.warning('client encoding not specified')
		_log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
		_log.warning('for this to work properly the application MUST have called locale.setlocale() before')

	# set connection properties
	# - client encoding
	try:
		conn.set_client_encoding(encoding)
	except dbapi.OperationalError:
		t, v, tb = sys.exc_info()
		if str(v).find("can't set encoding to") != -1:
			raise cEncodingError, (encoding, v), tb
		raise

	# - transaction isolation level
	if readonly:
		iso_level = u'read committed'
	else:
		conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
		iso_level = u'serializable'

	_log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)

	curs = conn.cursor()

	# - client time zone
	curs.execute(_sql_set_timezone, [_default_client_timezone])

	conn.commit()

	# FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
	# FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
	#
	# - bytea data format
	# PG 9.0 switched to - by default - using "hex" rather than "escape",
	# however, psycopg2's linked with a pre-9.0 libpq do assume "escape"
	# as the transmission mode for bytea output,
	# so try to set this setting back to "escape",
	# if that's not possible the reason will be that PG < 9.0 does not support
	# that setting - which also means we don't need it and can ignore the
	# failure
	cmd = "set bytea_output to 'escape'"
	try:
		curs.execute(cmd)
	except dbapi.ProgrammingError:
		_log.error('cannot set bytea_output format')

	curs.close()
	conn.commit()

	conn.is_decorated = True

	return conn
#-----------------------------------------------------------------------
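# Usage sketch: read-only connections come from the pool and are already
# decorated (client encoding, time zone, access mode):
#
#	conn = get_connection(readonly = True)
#	curs = conn.cursor()
#	curs.execute(u'select CURRENT_USER')
#	print curs.fetchone()[0]
#	curs.close()
#
# Note: do NOT call conn.close() on a pooled read-only connection - the
# pool replaces close() with a function that raises TypeError (see
# _raise_exception_on_ro_conn_close() below).
#-----------------------------------------------------------------------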
def shutdown():
	if __ro_conn_pool is None:
		return
	__ro_conn_pool.shutdown()

# ======================================================================
# internal helpers
#-----------------------------------------------------------------------
def __noop():
	pass
#-----------------------------------------------------------------------
def _raise_exception_on_ro_conn_close():
	raise TypeError(u'close() called on read-only connection')
#-----------------------------------------------------------------------
def log_database_access(action=None):
	run_insert (
		schema = u'gm',
		table = u'access_log',
		values = {u'user_action': action},
		end_tx = True
	)
#-----------------------------------------------------------------------
def sanity_check_time_skew(tolerance=60):
	"""Check server time and local time to be within
	the given tolerance of each other.

	tolerance: seconds
	"""
	_log.debug('maximum skew tolerance (seconds): %s', tolerance)

	cmd = u"SELECT now() at time zone 'UTC'"
	conn = get_raw_connection(readonly=True)
	curs = conn.cursor()

	start = time.time()
	rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
	end = time.time()
	client_now_as_utc = pydt.datetime.utcnow()

	curs.close()
	conn.commit()

	server_now_as_utc = rows[0][0]
	query_duration = end - start
	_log.info('server "now" (UTC): %s', server_now_as_utc)
	_log.info('client "now" (UTC): %s', client_now_as_utc)
	_log.debug('wire roundtrip (seconds): %s', query_duration)

	if query_duration > tolerance:
		_log.error('useless to check client/server time skew, wire roundtrip > tolerance')
		return False

	if server_now_as_utc > client_now_as_utc:
		real_skew = server_now_as_utc - client_now_as_utc
	else:
		real_skew = client_now_as_utc - server_now_as_utc

	_log.debug('client/server time skew: %s', real_skew)

	if real_skew > pydt.timedelta(seconds = tolerance):
		_log.error('client/server time skew > tolerance')
		return False

	return True
#-----------------------------------------------------------------------
1563 -def sanity_check_database_settings():
1564 """Checks database settings. 1565 1566 returns (status, message) 1567 status: 1568 0: no problem 1569 1: non-fatal problem 1570 2: fatal problem 1571 """ 1572 _log.debug('checking database settings') 1573 1574 conn = get_connection() 1575 1576 # - version string 1577 global postgresql_version_string 1578 if postgresql_version_string is None: 1579 curs = conn.cursor() 1580 curs.execute('SELECT version()') 1581 postgresql_version_string = curs.fetchone()['version'] 1582 curs.close() 1583 _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string) 1584 1585 options2check = { 1586 # setting: [expected value, risk, fatal?] 1587 u'allow_system_table_mods': [u'off', u'system breakage', False], 1588 u'check_function_bodies': [u'on', u'suboptimal error detection', False], 1589 u'datestyle': [u'ISO', u'faulty timestamp parsing', True], 1590 u'default_transaction_isolation': [u'read committed', u'faulty database reads', True], 1591 u'default_transaction_read_only': [u'on', u'accidental database writes', False], 1592 u'fsync': [u'on', u'data loss/corruption', True], 1593 u'full_page_writes': [u'on', u'data loss/corruption', False], 1594 u'lc_messages': [u'C', u'suboptimal error detection', False], 1595 u'password_encryption': [u'on', u'breach of confidentiality', False], 1596 u'regex_flavor': [u'advanced', u'query breakage', False], # 9.0 doesn't support this anymore, default now advanced anyway 1597 u'synchronous_commit': [u'on', u'data loss/corruption', False], 1598 u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True] 1599 } 1600 1601 from Gnumed.pycommon import gmCfg2 1602 _cfg = gmCfg2.gmCfgData() 1603 if _cfg.get(option = u'hipaa'): 1604 options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True] 1605 options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True] 1606 else: 1607 options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None] 1608 options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None] 1609 1610 cmd = u"SELECT name, setting from pg_settings where name in %(settings)s" 1611 rows, idx = run_ro_queries ( 1612 link_obj = conn, 1613 queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}], 1614 get_col_idx = False 1615 ) 1616 1617 found_error = False 1618 found_problem = False 1619 msg = [] 1620 for row in rows: 1621 option = row['name'] 1622 value_found = row['setting'] 1623 value_expected = options2check[option][0] 1624 risk = options2check[option][1] 1625 fatal_setting = options2check[option][2] 1626 if value_found != value_expected: 1627 if fatal_setting is True: 1628 found_error = True 1629 elif fatal_setting is False: 1630 found_problem = True 1631 elif fatal_setting is None: 1632 pass 1633 else: 1634 _log.error(options2check[option]) 1635 raise ValueError(u'invalid database configuration sanity check') 1636 msg.append(_(' option [%s]: %s') % (option, value_found)) 1637 msg.append(_(' risk: %s') % risk) 1638 _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk)) 1639 1640 if found_error: 1641 return 2, u'\n'.join(msg) 1642 1643 if found_problem: 1644 return 1, u'\n'.join(msg) 1645 1646 return 0, u''
#------------------------------------------------------------------------
def __log_PG_settings(curs=None):
	# don't use any of the run_*()s since that might
	# create a loop if we fail here
	# FIXME: use pg_settings
	try:
		curs.execute(u'show all')
	except Exception:
		_log.exception(u'cannot log PG settings (>>>show all<<< failed)')
		return False
	settings = curs.fetchall()
	if settings is None:
		_log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
		return False
	for setting in settings:
		_log.debug(u'PG option [%s]: %s', setting[0], setting[1])
	return True
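# Illustrative usage sketch (not part of the module): the helper is handed
# a raw cursor precisely because the run_*() wrappers may themselves be
# what is failing:
#
#	curs = conn.cursor()
#	__log_PG_settings(curs = curs)
#	curs.close()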
# =======================================================================
def extract_msg_from_pg_exception(exc=None):

	try:
		msg = exc.args[0]
	except (AttributeError, IndexError, TypeError):
		return u'cannot extract message from exception'

	# exception args are byte strings under Python 2, but guard against
	# already-decoded messages so unicode() does not raise a TypeError:
	if isinstance(msg, unicode):
		return msg
	return unicode(msg, gmI18N.get_encoding(), 'replace')
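# Illustrative usage sketch (not part of the module):
#
#	try:
#		curs.execute(cmd)
#	except dbapi.Error, exc:
#		_log.error(extract_msg_from_pg_exception(exc = exc))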
# =======================================================================
class cAuthenticationError(dbapi.OperationalError):

	def __init__(self, dsn=None, prev_val=None):
		self.dsn = dsn
		self.prev_val = prev_val

	def __str__(self):
		_log.warning('%s.__str__() called', self.__class__.__name__)
		tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
		_log.error(tmp)
		return tmp.encode(gmI18N.get_encoding(), 'replace')

	def __unicode__(self):
		return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)

# =======================================================================
# custom psycopg2 extensions
# =======================================================================
class cEncodingError(dbapi.OperationalError):

	def __init__(self, encoding=None, prev_val=None):
		self.encoding = encoding
		self.prev_val = prev_val

	def __str__(self):
		_log.warning('%s.__str__() called', self.__class__.__name__)
		return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))

	def __unicode__(self):
		return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)

# -----------------------------------------------------------------------
# Python -> PostgreSQL
# -----------------------------------------------------------------------
# test when Squeeze (and thus psycopg2 2.2) becomes Stable
class cAdapterPyDateTime(object):

	def __init__(self, dt):
		if dt.tzinfo is None:
			raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % dt.isoformat())
		self.__dt = dt

	def getquoted(self):
		return _timestamp_template % self.__dt.isoformat()
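
# Illustrative sketch (not part of the module): once the adapter is
# registered below, a timezone-aware datetime gets quoted through
# _timestamp_template (defined earlier in this module), roughly:
#
#	dt = pydt.datetime(2011, 1, 31, 14, 30, tzinfo = some_tzinfo)
#	cAdapterPyDateTime(dt).getquoted()
#	--> _timestamp_template % '2011-01-31T14:30:00+01:00'
#
# where "some_tzinfo" stands for any concrete tzinfo instance.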

## remove for 0.9
## ----------------------------------------------------------------------
##class cAdapterMxDateTime(object):
##
##	def __init__(self, dt):
##		if dt.tz == '???':
##			_log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
##		self.__dt = dt
##
##	def getquoted(self):
##		# under some locale settings the mx.DateTime ISO formatter
##		# will insert "," into the ISO string,
##		# and while this is allowed per the ISO 8601 spec PostgreSQL
##		# cannot currently handle it,
##		# so map those "," to "." to make things work:
##		return mxDT.ISO.str(self.__dt).replace(',', '.')
##
## ----------------------------------------------------------------------
## PostgreSQL -> Python
## ----------------------------------------------------------------------

#=======================================================================
# main
#-----------------------------------------------------------------------

# make sure psycopg2 knows how to handle unicode ...
# intended to become standard
# test when Squeeze (and thus psycopg2 2.2) becomes Stable
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime types with timestamps when locales are in use
# check in 0.9:
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

## remove for 0.9
#try:
#	import mx.DateTime as mxDT
##	psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
#except ImportError:
#	_log.warning('cannot import mx.DateTime')

# do NOT adapt *lists* to "... IN (...)" syntax because we want
# them adapted to "... ARRAY[...] ..." so we can support PG arrays
# (see the illustrative sketch in the test section below)

#=======================================================================
if __name__ == "__main__":

	if len(sys.argv) < 2:
		sys.exit()

	if sys.argv[1] != 'test':
		sys.exit()

	logging.basicConfig(level=logging.DEBUG)
	#--------------------------------------------------------------------
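	# Illustrative sketch (not an original test): psycopg2 adapts Python
	# lists to PostgreSQL arrays, which is why the module above refuses
	# to remap lists to "IN (...)" syntax:
	#
	#	curs.execute(u'SELECT %(vals)s', {'vals': [1, 2, 3]})
	#	--> sent to the server as: SELECT ARRAY[1, 2, 3]
	#--------------------------------------------------------------------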
	def test_file2bytea():
		run_rw_queries(queries = [
			{'cmd': u'create table test_bytea (data bytea)'}
		])

		cmd = u'insert into test_bytea values (%(data)s::bytea)'
		try:
			file2bytea(query = cmd, filename = sys.argv[2])
		except Exception:
			_log.exception('error')

		run_rw_queries(queries = [
			{'cmd': u'drop table test_bytea'}
		])
	#--------------------------------------------------------------------
	def test_get_connection():
		print "testing get_connection()"

		dsn = 'foo'
		try:
			conn = get_connection(dsn=dsn)
		except dbapi.OperationalError:
			print "SUCCESS: get_connection(%s) failed as expected" % dsn
			t, v = sys.exc_info()[:2]
			print ' ', t
			print ' ', v

		dsn = 'dbname=gnumed_v9'
		try:
			conn = get_connection(dsn=dsn)
		except cAuthenticationError:
			print "SUCCESS: get_connection(%s) failed as expected" % dsn
			t, v = sys.exc_info()[:2]
			print ' ', t
			print ' ', v

		dsn = 'dbname=gnumed_v9 user=abc'
		try:
			conn = get_connection(dsn=dsn)
		except cAuthenticationError:
			print "SUCCESS: get_connection(%s) failed as expected" % dsn
			t, v = sys.exc_info()[:2]
			print ' ', t
			print ' ', v

		dsn = 'dbname=gnumed_v9 user=any-doc'
		try:
			conn = get_connection(dsn=dsn)
		except cAuthenticationError:
			print "SUCCESS: get_connection(%s) failed as expected" % dsn
			t, v = sys.exc_info()[:2]
			print ' ', t
			print ' ', v

		dsn = 'dbname=gnumed_v9 user=any-doc password=abc'
		try:
			conn = get_connection(dsn=dsn)
		except cAuthenticationError:
			print "SUCCESS: get_connection(%s) failed as expected" % dsn
			t, v = sys.exc_info()[:2]
			print ' ', t
			print ' ', v

		dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
		conn = get_connection(dsn=dsn, readonly=True)

		dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
		conn = get_connection(dsn=dsn, readonly=False)

		dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
		encoding = 'foo'
		try:
			conn = get_connection(dsn=dsn, encoding=encoding)
		except cEncodingError:
			print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding)
			t, v = sys.exc_info()[:2]
			print ' ', t
			print ' ', v
	#--------------------------------------------------------------------
	def test_exceptions():
		print "testing exceptions"

		try:
			raise cAuthenticationError('no dsn', 'no previous exception')
		except cAuthenticationError:
			t, v, tb = sys.exc_info()
			print t
			print v
			print tb

		try:
			raise cEncodingError('no encoding', 'no previous exception')
		except cEncodingError:
			t, v, tb = sys.exc_info()
			print t
			print v
			print tb
	#--------------------------------------------------------------------
	def test_ro_queries():
		print "testing run_ro_queries()"

		dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
		conn = get_connection(dsn, readonly=True)

		data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True)
		print data
		print idx
		data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True)
		print data
		print idx

		curs = conn.cursor()

		data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True)
		print data
		print idx

		data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True)
		print data
		print idx

		try:
			data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True)
			print data
			print idx
		except psycopg2.ProgrammingError:
			print 'SUCCESS: run_ro_queries("selec 1") failed as expected'
			t, v = sys.exc_info()[:2]
			print ' ', t
			print ' ', v

		curs.close()
	#--------------------------------------------------------------------
	def test_request_dsn():
		conn = get_connection()
		print conn
		conn.close()
	#--------------------------------------------------------------------
	def test_set_encoding():
		print "testing set_default_client_encoding()"

		# same checks as before, folded into a loop over the test encodings:
		for enc in ['foo', '', 'latin1', 'utf8', 'unicode', 'UNICODE']:
			try:
				set_default_client_encoding(enc)
				print "SUCCESS: encoding [%s] worked" % enc
			except ValueError:
				print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
				t, v = sys.exc_info()[:2]
				print ' ', t
				print ' ', v
	#--------------------------------------------------------------------
	def test_connection_pool():
		dsn = get_default_dsn()
		pool = cConnectionPool(minconn=1, maxconn=2, dsn=dsn, verbose=False)
		print pool
		print pool.getconn()
		print pool.getconn()
		print pool.getconn()
		print type(pool.getconn())
	#--------------------------------------------------------------------
	def test_list_args():
		dsn = get_default_dsn()
		conn = get_connection(dsn, readonly=True)
		curs = conn.cursor()
		curs.execute('SELECT * FROM clin.clin_narrative WHERE narrative = %s', ['a'])
	#--------------------------------------------------------------------
	def test_sanitize_pg_regex():
		tests = [
			['(', '\\(']
			, ['[', '\\[']
			, [')', '\\)']
		]
		for test in tests:
			result = sanitize_pg_regex(test[0])
			if result != test[1]:
				print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
	#--------------------------------------------------------------------
	def test_is_pg_interval():
		status = True
		tests = [
			[None, True],		# None == NULL == succeeds !
			[1, True],
			['1', True],
			['abc', False]
		]

		if not is_pg_interval():
			print 'ERROR: is_pg_interval() returned "False", expected "True"'
			status = False

		for test in tests:
			result = is_pg_interval(test[0])
			if result != test[1]:
				print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
				status = False

		return status
	#--------------------------------------------------------------------
	def test_sanity_check_time_skew():
		sanity_check_time_skew()
	#--------------------------------------------------------------------
	def test_get_foreign_key_details():
		for row in get_foreign_keys2column (
			schema = u'dem',
			table = u'identity',
			column = u'pk'
		):
			print '%s.%s references %s.%s.%s' % (
				row['referencing_table'],
				row['referencing_column'],
				row['referenced_schema'],
				row['referenced_table'],
				row['referenced_column']
			)
	#--------------------------------------------------------------------
	def test_set_user_language():
		# (user, language, expected result)
		tests = [
			# current user
			[None, 'de_DE', True],
			[None, 'lang_w/o_tx', False],
			[None, None, True],
			# valid user
			['any-doc', 'de_DE', True],
			['any-doc', 'lang_w/o_tx', False],
			['any-doc', None, True],
			# invalid user
			['invalid user', 'de_DE', None],
			['invalid user', 'lang_w/o_tx', False],		# language checking happens before user checking
			['invalid user', None, True]
		]
		for test in tests:
			try:
				result = set_user_language(user = test[0], language = test[1])
				if result != test[2]:
					print "test:", test
					print "result:", result, "expected:", test[2]
			except psycopg2.IntegrityError, e:
				if test[2] is None:
					continue
				print "test:", test
				print "unexpected exception:", e
	#--------------------------------------------------------------------
	def test_get_schema_revision_history():
		for line in get_schema_revision_history():
			print u' - '.join(line)
	#--------------------------------------------------------------------
	def test_run_query():
		gmDateTime.init()
		args = {'dt': gmDateTime.pydt_max_here()}
		cmd = u"SELECT %(dt)s"

		#cmd = u"SELECT 'infinity'::timestamp with time zone"
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
		print rows
	#--------------------------------------------------------------------
	def test_schema_exists():
		print schema_exists()

	#--------------------------------------------------------------------
	# run tests
	#test_file2bytea()
	#test_get_connection()
	#test_exceptions()
	#test_ro_queries()
	#test_request_dsn()
	#test_set_encoding()
	#test_connection_pool()
	#test_list_args()
	#test_sanitize_pg_regex()
	#test_is_pg_interval()
	#test_sanity_check_time_skew()
	#test_get_foreign_key_details()
	#test_set_user_language()
	#test_get_schema_revision_history()
	#test_run_query()
	test_schema_exists()

# ======================================================================