Package Gnumed :: Package pycommon :: Module gmPG2
[frames] | [no frames]

Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
  13  # ======================================================================= 
  14  __version__ = "$Revision: 1.127 $" 
  15  __author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>" 
  16  __license__ = 'GPL v2 or later (details at http://www.gnu.org)' 
  17   
  18  ### imports ### 
  19  # stdlib 
  20  import time, locale, sys, re as regex, os, codecs, types, datetime as pydt, logging 
  21   
  22   
  23  # GNUmed 
  24  if __name__ == '__main__': 
  25          sys.path.insert(0, '../../') 
  26  from Gnumed.pycommon import gmLoginInfo, gmExceptions, gmDateTime, gmBorg, gmI18N, gmLog2 
  27  from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character 
  28   
  29  _log = logging.getLogger('gm.db') 
  30  _log.info(__version__) 
  31   
  32   
  33  # 3rd party 
  34  try: 
  35          import psycopg2 as dbapi 
  36  except ImportError: 
  37          _log.exception("Python database adapter psycopg2 not found.") 
  38          print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server." 
  39          raise 
  40  ### imports ### 
  41   
  42   
  43  _log.info('psycopg2 version: %s' % dbapi.__version__) 
  44  _log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle)) 
  45  if not (float(dbapi.apilevel) >= 2.0): 
  46          raise ImportError('gmPG2: supported DB-API level too low') 
  47  if not (dbapi.threadsafety > 0): 
  48          raise ImportError('gmPG2: lacking minimum thread safety in psycopg2') 
  49  if not (dbapi.paramstyle == 'pyformat'): 
  50          raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2') 
  51  try: 
  52          dbapi.__version__.index('dt') 
  53  except ValueError: 
  54          raise ImportError('gmPG2: lacking datetime support in psycopg2') 
  55  try: 
  56          dbapi.__version__.index('ext') 
  57  except ValueError: 
  58          raise ImportError('gmPG2: lacking extensions support in psycopg2') 
  59  try: 
  60          dbapi.__version__.index('pq3') 
  61  except ValueError: 
  62          raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2') 
  63   
  64  import psycopg2.extras 
  65  import psycopg2.extensions 
  66  import psycopg2.pool 
  67  import psycopg2.errorcodes as sql_error_codes 
  68   
  69  # ======================================================================= 
  70  _default_client_encoding = 'UTF8' 
  71  _log.info('assuming default client encoding of [%s]' % _default_client_encoding) 
  72   
  73  # things timezone 
  74  _default_client_timezone = None                 # default time zone for connections 
  75  _sql_set_timezone = None 
  76  _timestamp_template = "cast('%s' as timestamp with time zone)"          # MUST NOT be uniocde or else getquoted will not work 
  77  FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone 
  78   
  79  _default_dsn = None 
  80  _default_login = None 
  81   
  82  postgresql_version_string = None 
  83  postgresql_version = None                       # accuracy: major.minor 
  84   
  85  __ro_conn_pool = None 
  86   
  87  auto_request_login_params = True 
  88  # ======================================================================= 
  89  # global data 
  90  # ======================================================================= 
  91   
  92  known_schema_hashes = { 
  93          0: 'not released, testing only', 
  94          2: 'b09d50d7ed3f91ddf4c4ddb8ea507720', 
  95          3: 'e73718eaf230d8f1d2d01afa8462e176', 
  96          4: '4428ccf2e54c289136819e701bb095ea', 
  97          5: '7e7b093af57aea48c288e76632a382e5',  # ... old (v1) style hashes 
  98          6: '90e2026ac2efd236da9c8608b8685b2d',  # new (v2) style hashes ... 
  99          7: '6c9f6d3981483f8e9433df99d1947b27', 
 100          8: '89b13a7af83337c3aad153b717e52360', 
 101          9: '641a9b2be3c378ffc2bb2f0b1c9f051d', 
 102          10: '7ef42a8fb2bd929a2cdd0c63864b4e8a', 
 103          11: '03042ae24f3f92877d986fb0a6184d76', 
 104          12: '06183a6616db62257e22814007a8ed07', 
 105          13: 'fab7c1ae408a6530c47f9b5111a0841e', 
 106          14: 'e170d543f067d1ea60bfe9076b1560cf', 
 107          15: '70012ff960b77ecdff4981c94b5b55b6', 
 108          16: '49f03ae688e714ad940e050f8d9557de' 
 109  } 
 110   
 111  map_schema_hash2version = { 
 112          'b09d50d7ed3f91ddf4c4ddb8ea507720': 2, 
 113          'e73718eaf230d8f1d2d01afa8462e176': 3, 
 114          '4428ccf2e54c289136819e701bb095ea': 4, 
 115          '7e7b093af57aea48c288e76632a382e5': 5, 
 116          '90e2026ac2efd236da9c8608b8685b2d': 6, 
 117          '6c9f6d3981483f8e9433df99d1947b27': 7, 
 118          '89b13a7af83337c3aad153b717e52360': 8, 
 119          '641a9b2be3c378ffc2bb2f0b1c9f051d': 9, 
 120          '7ef42a8fb2bd929a2cdd0c63864b4e8a': 10, 
 121          '03042ae24f3f92877d986fb0a6184d76': 11, 
 122          '06183a6616db62257e22814007a8ed07': 12, 
 123          'fab7c1ae408a6530c47f9b5111a0841e': 13, 
 124          'e170d543f067d1ea60bfe9076b1560cf': 14, 
 125          '70012ff960b77ecdff4981c94b5b55b6': 15, 
 126          '49f03ae688e714ad940e050f8d9557de': 16 
 127  } 
 128   
 129  map_client_branch2required_db_version = { 
 130          u'GIT tree': 0, 
 131          u'0.3': 9, 
 132          u'0.4': 10, 
 133          u'0.5': 11, 
 134          u'0.6': 12, 
 135          u'0.7': 13, 
 136          u'0.8': 14, 
 137          u'0.9': 15, 
 138          u'1.0': 16, 
 139          u'1.1': 16 
 140  } 
 141   
 142  # get columns and data types for a given table 
 143  query_table_col_defs = u"""select 
 144          cols.column_name, 
 145          cols.udt_name 
 146  from 
 147          information_schema.columns cols 
 148  where 
 149          cols.table_schema = %s 
 150                  and 
 151          cols.table_name = %s 
 152  order by 
 153          cols.ordinal_position""" 
 154   
 155  query_table_attributes = u"""select 
 156          cols.column_name 
 157  from 
 158          information_schema.columns cols 
 159  where 
 160          cols.table_schema = %s 
 161                  and 
 162          cols.table_name = %s 
 163  order by 
 164          cols.ordinal_position""" 
 165   
 166  # ======================================================================= 
 167  # module globals API 
 168  # ======================================================================= 
def set_default_client_encoding(encoding = None):
	"""Set the default client (wire) encoding used for new connections.

	The encoding is validated against both psycopg2's known
	encodings and Python's codecs registry before it is accepted.
	"""
	# psycopg2 must know the wire encoding ...
	if encoding not in psycopg2.extensions.encodings:
		raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
	# ... and Python must be able to decode it, too
	python_codec = psycopg2.extensions.encodings[encoding]
	try:
		codecs.lookup(python_codec)
	except LookupError:
		_log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, python_codec))
		raise
	# FIXME: check encoding against the database
	# FIXME: - but we may not yet have access
	# FIXME: - psycopg2 will pull its encodings from the database eventually
	# it seems safe to set it
	global _default_client_encoding
	_log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
	_default_client_encoding = encoding
	return True
188 #---------------------------------------------------
def set_default_client_timezone(timezone = None):
	"""Set the default time zone used for new connections."""
	# FIXME: use __validate
	global _default_client_timezone
	global _sql_set_timezone
	_log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
	_default_client_timezone = timezone
	_sql_set_timezone = u'set timezone to %s'
	return True
200 #---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):
	"""Check whether <timezone> can be both set and actually used on <conn>.

	Runs inside its own transaction which is rolled back afterwards,
	so the connection's own time zone setting is left untouched.
	"""
	_log.debug(u'validating time zone [%s]', timezone)

	conn.commit()
	curs = conn.cursor()

	is_valid = False
	try:
		curs.execute(u'set timezone to %(tz)s', {u'tz': timezone})
		_log.info(u'time zone [%s] is settable', timezone)
		# being settable is not enough -- can we actually use it, though ?
		try:
			curs.execute(u"""select '1920-01-19 23:00:00+01'::timestamp with time zone""")
			curs.fetchone()
			_log.info(u'time zone [%s] is usable', timezone)
			is_valid = True
		except:
			_log.error('error using time zone [%s]', timezone)
	except dbapi.DataError:
		_log.warning(u'time zone [%s] is not settable', timezone)
	except:
		_log.error(u'failed to set time zone to [%s]', timezone)
		_log.exception(u'')

	curs.close()
	conn.rollback()

	return is_valid
233 #---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
	"""Expand a time zone abbreviation into a full zone name.

	Some timezone defs are abbreviations (say, "CET") which
	"set time zone" does not accept, so try to find a full
	"Region/City" style name carrying that abbreviation.
	Returns the input unchanged when no expansion is found.
	"""
	cmd = u"""
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
	conn.commit()
	curs = conn.cursor()

	expanded = timezone
	try:
		curs.execute(cmd, {u'tz': timezone})
		rows = curs.fetchall()
		if len(rows) > 0:
			expanded = rows[0][0]
			_log.debug(u'[%s] maps to [%s]', timezone, expanded)
	except:
		_log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

	curs.close()
	conn.rollback()

	return expanded
265 #---------------------------------------------------
def __detect_client_timezone(conn=None):
	"""Detect and remember the client time zone.

	This is run on the very first connection. Sets the module
	globals _default_client_timezone and _sql_set_timezone.
	"""
	# FIXME: check whether server.timezone is the same
	# FIXME: value as what we eventually detect

	# we need gmDateTime to be initialized
	if gmDateTime.current_local_iso_numeric_timezone_string is None:
		gmDateTime.init()

	_log.debug('trying to detect timezone from system')

	# collect candidates: $TZ (plus its expansion) first,
	# then the locally detected zone name (plus its expansion)
	candidates = []
	try:
		tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
		candidates.append(tz)
		expanded = __expand_timezone(conn = conn, timezone = tz)
		if expanded != tz:
			candidates.append(expanded)
	except KeyError:
		pass

	candidates.append(gmDateTime.current_local_timezone_name)
	expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
	if expanded != gmDateTime.current_local_timezone_name:
		candidates.append(expanded)

	_log.debug('candidates: %s', str(candidates))

	# use the first candidate the backend accepts
	global _default_client_timezone
	global _sql_set_timezone
	found = False
	for tz in candidates:
		if __validate_timezone(conn = conn, timezone = tz):
			_default_client_timezone = tz
			_sql_set_timezone = u'set timezone to %s'
			found = True
			break

	if not found:
		# no name worked -- fall back to a fixed numeric offset
		_default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
		_sql_set_timezone = u"set time zone interval %s hour to minute"

	_log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
311 # ======================================================================= 312 # login API 313 # =======================================================================
314 -def __request_login_params_tui():
315 """Text mode request of database login parameters""" 316 import getpass 317 login = gmLoginInfo.LoginInfo() 318 319 print "\nPlease enter the required login parameters:" 320 try: 321 login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '') 322 login.database = prompted_input(prompt = "database", default = 'gnumed_v16') 323 login.user = prompted_input(prompt = "user name", default = '') 324 tmp = 'password for "%s" (not shown): ' % login.user 325 login.password = getpass.getpass(tmp) 326 login.port = prompted_input(prompt = "port", default = 5432) 327 except KeyboardInterrupt: 328 _log.warning("user cancelled text mode login dialog") 329 print "user cancelled text mode login dialog" 330 raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!")) 331 332 return login
333 #---------------------------------------------------
def __request_login_params_gui_wx():
	"""GUI (wx) input request for database login parameters.

	Returns gmLoginInfo.LoginInfo object
	"""
	import wx
	# wxPython was already loaded -- but has the main Application
	# instance been initialized yet ? if not, bail out
	if wx.GetApp() is None:
		raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

	# launch the login dialog; if wx was not initialized / there is
	# no main App loop, an exception should be raised anyway
	import gmAuthWidgets
	dlg = gmAuthWidgets.cLoginDialog(None, -1)
	dlg.ShowModal()
	login = dlg.panel.GetLoginInfo()
	dlg.Destroy()

	# user cancelled or something else went wrong
	if login is None:
		raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

	return login
358 #---------------------------------------------------
def request_login_params():
	"""Request login parameters for database connection.

	Tries the wxPython GUI dialog when a display seems available,
	falls back to text mode otherwise.

	Raises when auto_request_login_params is disabled.
	"""
	# do we auto-request parameters at all ?
	if not auto_request_login_params:
		raise Exception('Cannot request login parameters.')

	# are we inside X ?
	# (if we aren't, wxGTK will crash hard at
	#  C-level with "can't open Display")
	if 'DISPLAY' in os.environ:			# .has_key() is deprecated / gone in py3
		# try wxPython GUI
		try:
			return __request_login_params_gui_wx()
		except Exception:
			# best-effort only -- but log WHY the GUI path failed
			# instead of silently swallowing the error; note that
			# KeyboardInterrupt is deliberately NOT caught here
			_log.exception('cannot request login parameters via wxPython GUI')

	# well, either we are on the console or
	# wxPython does not work, use text mode
	return __request_login_params_tui()
376 377 # ======================================================================= 378 # DSN API 379 # -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	"""Assemble a DSN string for psycopg2.connect() from the given parts.

	Parts which are None or blank are left out entirely;
	"sslmode=prefer" is always appended.
	"""
	fields = (
		('dbname', database, False),
		('host', host, False),
		('port', port, True),		# port may legitimately be an int
		('user', user, False),
		('password', password, False),
	)

	dsn_parts = []
	for key, value, coerce in fields:
		if value is None:
			continue
		text = str(value) if coerce else value
		if text.strip() == '':
			continue
		dsn_parts.append('%s=%s' % (key, value))

	dsn_parts.append('sslmode=prefer')

	return ' '.join(dsn_parts)
401 # ------------------------------------------------------
def get_default_login():
	"""Return the default login, requesting one interactively if needed."""
	# make sure we do have a login
	get_default_dsn()
	return _default_login
406 # ------------------------------------------------------
def get_default_dsn():
	"""Return the default DSN, requesting login parameters if not set yet."""
	global _default_dsn
	if _default_dsn is None:
		# not established yet -- interactively ask for login parameters
		login = request_login_params()
		set_default_login(login = login)
	return _default_dsn
416 # ------------------------------------------------------
def set_default_login(login=None):
	"""Remember <login> as the default login and derive the default DSN from it.

	Returns False when login is None, True otherwise.
	"""
	if login is None:
		return False

	# normalize: an empty host means non-TCP/IP (local socket) connection
	if login.host is not None:
		if login.host.strip() == u'':
			login.host = None

	global _default_login
	# log BEFORE overwriting the global -- the original code logged
	# after the assignment so "from" and "to" were always identical
	_log.info('setting default login from [%s] to [%s]' % (_default_login, login))
	_default_login = login

	dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

	global _default_dsn
	if _default_dsn is None:
		old_dsn = u'None'
	else:
		# never log passwords in clear text
		old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
	_log.info (
		'setting default DSN from [%s] to [%s]',
		old_dsn,
		regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
	)
	_default_dsn = dsn

	return True
# =======================================================================
# metadata API
# =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
	"""Compare the live database schema hash against the known hash for <version>.

	Returns True on a match, False (with extensive logging) otherwise.
	"""
	expected_hash = known_schema_hashes[version]
	# the development tree reports its structure as pseudo-version 9999
	if version == 0:
		args = {'ver': 9999}
	else:
		args = {'ver': version}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
			'args': args
		}]
	)
	if rows[0]['md5'] == expected_hash:
		_log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
		return True

	_log.error('database schema version mismatch')
	_log.error('expected: %s (%s)' % (version, expected_hash))
	_log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
	if verbose:
		_log.debug('schema dump follows:')
		for line in get_schema_structure(link_obj = link_obj).split():
			_log.debug(line)
		_log.debug('schema revision history dump follows:')
		for line in get_schema_revision_history(link_obj = link_obj):
			_log.debug(u' - '.join(line))
	return False
473 #------------------------------------------------------------------------
def get_schema_version(link_obj=None):
	"""Map the current database schema hash to a version number.

	Returns a descriptive unicode string when the hash is unknown.
	"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	md5_sum = rows[0]['md5']
	try:
		return map_schema_hash2version[md5_sum]
	except KeyError:
		return u'unknown database schema version, MD5 hash is [%s]' % md5_sum
480 #------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
	"""Return the concatenated schema structure dump as a single string."""
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': u'select gm.concat_table_structure()'}]
	)
	return rows[0][0]
484 #------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):
	"""List schema revisions (import time, version, filename), oldest first."""
	cmd = u"""
select
	imported::text,
	version,
	filename
from gm.schema_revision
order by imported
"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}])
	return rows
496 #------------------------------------------------------------------------
def get_current_user():
	"""Return the name of the current database account."""
	rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
	return rows[0][0]
500 #------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
	"""Get the foreign keys pointing to schema.table.column.

	Does not properly work with multi-column FKs.
	GNUmed doesn't use any, however.
	"""
	args = {
		'schema': schema,
		'tbl': table,
		'col': column
	}
	cmd = u"""
select
	%(schema)s as referenced_schema,
	%(tbl)s as referenced_table,
	%(col)s as referenced_column,
	pgc.confkey as referenced_column_list,
	pgc.conrelid::regclass as referencing_table,
	pgc.conkey as referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
	pg_constraint pgc
where
	pgc.contype = 'f'
		and
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': args}]
	)
	return rows
552 #------------------------------------------------------------------------
def get_child_tables(schema='public', table=None, link_obj=None):
	"""Return child tables of <table>."""
	cmd = u"""
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
	args = {'schema': schema, 'table': table}
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
	return rows
574 #------------------------------------------------------------------------
def schema_exists(link_obj=None, schema=u'gm'):
	"""True if a schema named <schema> exists in the database."""
	cmd = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': {'schema': schema}}]
	)
	return rows[0][0]
580 #------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
	"""Returns false, true."""
	cmd = u"""
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': (schema, table)}]
	)
	return rows[0][0]
593 #------------------------------------------------------------------------
def get_col_indices(cursor = None):
	"""Map column names of the last query on <cursor> to their position.

	Returns None when no result description is available (unused
	cursor, or the last query did not select rows).
	"""
	if cursor.description is None:
		_log.error('no result description available: unused cursor or last query did not select rows')
		return None
	col_indices = {}
	# enumerate() replaces the manual col_index counter
	for col_index, col_desc in enumerate(cursor.description):
		col_name = col_desc[0]
		# a query like "select 1,2;" will return two columns of the same name !
		# hence adjust to that, note, however, that dict-style access won't work
		# on results of such queries ...
		if col_name in col_indices:		# dict.has_key() was removed in Python 3
			col_name = '%s_%s' % (col_name, col_index)
		col_indices[col_name] = col_index
	return col_indices
611 #------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
	"""Return [[column names], {column name: type name}] for a table."""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	col_names = []
	col_type = {}
	for row in rows:
		col_names.append(row[0])
		# array types are reported by PostgreSQL as "_<base type>"
		if row[1].startswith('_'):
			col_type[row[0]] = row[1][1:] + '[]'
		else:
			col_type[row[0]] = row[1]
	return [col_names, col_type]
627 #------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
	"""Return column attributes of table"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
	return [ row[0] for row in rows ]
635 636 #------------------------------------------------------------------------ 637 # i18n functions 638 #------------------------------------------------------------------------
def export_translations_from_database(filename=None):
	"""Dump all database string translations into an SQL script <filename>.

	The generated script re-imports the translations via i18n.upd_tx()
	and is intended to be emailed to the GNUmed developers.

	Returns True on completion.
	"""
	tx_file = codecs.open(filename, 'wb', 'utf8')
	tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
	tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
	tx_file.write(u'-- - user language is set to [%s]\n\n' % get_current_user_language())
	tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
	tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
	tx_file.write(u'set default_transaction_read_only to off\n\n')
	# do not abort the whole script on individual statement errors
	tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

	cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	for row in rows:
		# escape single quotes for the E'...' string literals below
		line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
			row['lang'].replace("'", "\\'"),
			row['orig'].replace("'", "\\'"),
			row['trans'].replace("'", "\\'")
		)
		tx_file.write(line)
	tx_file.write(u'\n')

	# re-enable strict error handling for whatever follows
	tx_file.write(u'\set ON_ERROR_STOP 1\n')
	tx_file.close()

	return True
664 #------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
	"""Delete one translation (language/original pair) from the database."""
	run_rw_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s',
			'args': {'lang': language, 'orig': original}
		}],
		return_data = False,
		end_tx = True
	)
	return True
670 671 #------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
	"""Insert or update one translation in the database via i18n.upd_tx()."""
	args = {'lang': language, 'orig': original, 'trans': translation}
	run_rw_queries (
		queries = [{'cmd': u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)', 'args': args}],
		return_data = False
	)
	return args
677 678 #------------------------------------------------------------------------
def get_translation_languages():
	"""Return the list of languages any translations exist for."""
	rows, idx = run_ro_queries (
		queries = [{'cmd': u'select distinct lang from i18n.translations'}]
	)
	return [ row[0] for row in rows ]
684 685 #------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):
	"""Retrieve translatable strings, optionally for one language only.

	language = None: all languages (untranslated keys included with NULLs)
	order_by = None: default ordering by (lang, orig)
	"""
	args = {'lang': language}
	_log.debug('language [%s]', language)

	# BUGFIX: the original condition was inverted -- a caller-supplied
	# order_by was discarded and None was formatted into the clause
	if order_by is None:
		order_by = u'ORDER BY lang, orig'
	else:
		order_by = u'ORDER BY %s' % order_by

	if language is None:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		NULL as lang,
		ik.orig,
		NULL AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		it.lang,
		it.orig,
		it.trans
	FROM
		i18n.translations it

)) as translatable_strings
%s""" % order_by
	else:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		%%(lang)s as lang,
		ik.orig,
		i18n._(ik.orig, %%(lang)s) AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		%%(lang)s as lang,
		it.orig,
		i18n._(it.orig, %%(lang)s) AS trans
	FROM
		i18n.translations it

)) AS translatable_strings
%s""" % order_by

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

	if rows is None:
		_log.error('no translatable strings found')
	else:
		_log.debug('%s translatable strings found', len(rows))

	return rows
757 758 #------------------------------------------------------------------------
def get_current_user_language():
	"""Return the i18n language configured for the current database user."""
	rows, idx = run_ro_queries(queries = [{'cmd': u'select i18n.get_curr_lang()'}])
	return rows[0][0]
763 764 #------------------------------------------------------------------------
def set_user_language(user=None, language=None):
	"""Set the user language in the database.

	user = None: current db user
	language = None: unset
	"""
	_log.info('setting database language for user [%s] to [%s]', user, language)

	args = {
		'usr': user,
		'lang': language
	}

	if language is None:
		if user is None:
			queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
		else:
			queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
		# unset_curr_lang() returns no status, so append a query
		# that fakes a success flag for the check below
		queries.append({'cmd': u'select True'})
	else:
		if user is None:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
		else:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

	rows, idx = run_rw_queries(queries = queries, return_data = True)

	# rows[0][0] is the status of the LAST query in <queries>
	if not rows[0][0]:
		_log.error('cannot set database language to [%s] for user [%s]', language, user)

	return rows[0][0]
796 #------------------------------------------------------------------------
def force_user_language(language=None):
	"""Set the user language in the database.

	- regardless of whether there is any translation available.
	- only for the current user
	"""
	_log.info('forcing database language for current db user to [%s]', language)
	queries = [{
		'cmd': u'select i18n.force_curr_lang(%(lang)s)',
		'args': {'lang': language}
	}]
	run_rw_queries(queries = queries)
809 #------------------------------------------------------------------------ 810 #------------------------------------------------------------------------ 811 text_expansion_keywords = None 812
def get_text_expansion_keywords():
	"""Return all text expansion keywords, loading and caching them on first use."""
	global text_expansion_keywords
	if text_expansion_keywords is None:
		cmd = u"""select keyword, public_expansion, private_expansion, owner from clin.v_keyword_expansions"""
		rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
		text_expansion_keywords = rows
		_log.info('retrieved %s text expansion keywords', len(text_expansion_keywords))
	return text_expansion_keywords
825 #------------------------------------------------------------------------
def expand_keyword(keyword = None):
	"""Return the expansion for <keyword>, or None if there is none."""
	# Easter Egg ;-)
	if keyword == u'$$steffi':
		return u'Hai, play ! Versucht das ! (Keks dazu ?) :-)'

	cmd = u"""select expansion from clin.v_your_keyword_expansions where keyword = %(kwd)s"""
	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
	if len(rows) == 0:
		return None
	return rows[0]['expansion']
839 #------------------------------------------------------------------------
def get_keyword_expansion_candidates(keyword = None):
	"""Return the known expansion keywords starting with <keyword>."""
	if keyword is None:
		return []
	# get_text_expansion_keywords() (re)fills and returns the cache
	return [
		kwd['keyword']
		for kwd in get_text_expansion_keywords()
		if kwd['keyword'].startswith(keyword)
	]
853 #------------------------------------------------------------------------
def add_text_expansion(keyword=None, expansion=None, public=None):
	"""Add a text expansion for <keyword> unless one already exists.

	public: store a public expansion, else a private one
	        tied to the current staff member
	Returns False when an expansion of that visibility already exists.
	"""
	if public:
		cmd = u"SELECT 1 from clin.v_keyword_expansions where public_expansion is true and keyword = %(kwd)s"
	else:
		cmd = u"SELECT 1 from clin.v_your_keyword_expansions where private_expansion is true and keyword = %(kwd)s"
	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
	if len(rows) != 0:
		return False

	if public:
		cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, null)"""
	else:
		cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (SELECT pk from dem.staff where db_user = current_user))"""
	rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword, 'exp': expansion}}])

	# invalidate the keyword cache
	global text_expansion_keywords
	text_expansion_keywords = None

	return True
880 #------------------------------------------------------------------------
def delete_text_expansion(keyword):
	"""Remove the expansion for <keyword> owned by the current user."""
	args = {'kwd': keyword}
	cmd = u"""
delete from clin.keyword_expansion where
	keyword = %(kwd)s and (
		(fk_staff = (SELECT pk from dem.staff where db_user = current_user))
			or
		(fk_staff is null and owner = current_user)
	)"""
	run_rw_queries(queries = [{'cmd': cmd, 'args': args}])

	# invalidate the keyword cache
	global text_expansion_keywords
	text_expansion_keywords = None
893 #------------------------------------------------------------------------
def edit_text_expansion(keyword, expansion):
	"""Replace the current user's private expansion for <keyword>.

	Runs delete + insert as one transaction.
	"""
	args = {'kwd': keyword, 'exp': expansion}
	# drop the old definition ...
	delete_cmd = u"""
DELETE FROM clin.keyword_expansion
WHERE
	keyword = %(kwd)s
		AND
	fk_staff = (SELECT pk FROM dem.staff WHERE db_user = current_user)"""
	# ... and store the new one
	insert_cmd = u"""
INSERT INTO clin.keyword_expansion (
	keyword, expansion, fk_staff
) VALUES (
	%(kwd)s,
	%(exp)s,
	(SELECT pk FROM dem.staff WHERE db_user = current_user)
)"""
	run_rw_queries(queries = [
		{'cmd': delete_cmd, 'args': args},
		{'cmd': insert_cmd, 'args': args}
	])

	# invalidate the keyword cache
	global text_expansion_keywords
	text_expansion_keywords = None
919 # ======================================================================= 920 # query runners and helpers 921 # =======================================================================
def send_maintenance_notification():
	"""Broadcast an impending-maintenance warning to all DB listeners."""
	run_rw_queries (
		queries = [{'cmd': u'notify "db_maintenance_warning:"'}],
		return_data = False
	)
925 #------------------------------------------------------------------------
def send_maintenance_shutdown():
	"""Ask all DB listeners to disconnect for maintenance."""
	run_rw_queries (
		queries = [{'cmd': u'notify "db_maintenance_disconnect:"'}],
		return_data = False
	)
929 #------------------------------------------------------------------------
def is_pg_interval(candidate=None):
	"""Check whether <candidate> can be cast to a PostgreSQL interval.

	Tries a direct cast first, then a cast via ::text
	(needed for values PostgreSQL will not cast directly).

	Returns True/False.
	"""
	cmd = u'SELECT %(candidate)s::interval'
	try:
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
		return True
	except Exception:		# was a bare <except:> - do not swallow SystemExit/KeyboardInterrupt
		cmd = u'SELECT %(candidate)s::text::interval'
		try:
			rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
			return True
		except Exception:	# was a bare <except:>, too
			return False
942 #------------------------------------------------------------------------
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Retrieve bytea data and write it into <filename>.

	See bytea2file_object() for the meaning of the arguments.

	Returns the result of bytea2file_object().
	"""
	outfile = open(filename, 'wb')		# open() rather than the deprecated file() builtin
	try:
		return bytea2file_object (
			data_query = data_query,
			file_obj = outfile,
			chunk_size = chunk_size,
			data_size = data_size,
			data_size_query = data_size_query
		)
	finally:
		# close the file even if retrieval raises
		outfile.close()
948 #------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store data from a bytea field into a file.

	<data_query>
		- dict {'cmd': ..., 'args': ...}
		- 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
		- 'args' must be a dict
		- must return one row with one field of type bytea
	<file>
		- must be a file like Python object
	<data_size>
		- integer of the total size of the expected data or None
	<data_size_query>
		- dict {'cmd': ..., 'args': ...}
		- cmd must be unicode
		- must return one row with one field with the octet_length() of the data field
		- used only when <data_size> is None

	Returns True on success (including when there is nothing to fetch).
	"""
	if data_size == 0:
		return True

	# If the client sets an encoding other than the default we
	# will receive encoding-parsed data which isn't the binary
	# content we want. Hence we need to get our own connection.
	# It must be a read-write one so that we don't affect the
	# encoding for other users of the shared read-only
	# connections.
	# Actually, encodings shouldn't be applied to binary data
	# (eg. bytea types) in the first place but that is only
	# reported to be fixed > v7.4.
	# further tests reveal that at least on PG 8.0 this bug still
	# manifests itself
	# NOTE(review): the comment above says "read-write" but a
	# readonly connection is requested here -- confirm intent
	conn = get_raw_connection(readonly=True)

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
	if data_size in [None, 0]:
		conn.rollback()
		return True

	_log.debug('expecting bytea data of size: [%s] bytes' % data_size)
	_log.debug('using chunk size of: [%s] bytes' % chunk_size)

	# chunk size of 0 means "retrieve whole field at once"
	if chunk_size == 0:
		chunk_size = data_size
		_log.debug('chunk size [0] bytes: retrieving all data at once')

	# Windoze sucks: it can't transfer objects of arbitrary size,
	# anyways, we need to split the transfer,
	# however, only possible if postgres >= 7.2
	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('chunks to retrieve: [%s]' % needed_chunks)
	_log.debug('remainder to retrieve: [%s] bytes' % remainder)

	# try setting "bytea_output"
	# - fails if not necessary
	# - succeeds if necessary
	try:
		run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
	except dbapi.ProgrammingError:
		_log.debug('failed to set bytea_output to "escape", not necessary')

	# retrieve chunks, skipped if data size < chunk size,
	# does this not carry the danger of cutting up multi-byte escape sequences ?
	# no, since bytea is binary,
	# yes, since in bytea there are *some* escaped values, still
	# no, since those are only escaped during *transfer*, not on-disk, hence
	# only complete escape sequences are put on the wire
	for chunk_id in range(needed_chunks):
		# substring() is 1-based, hence the +1
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# retrieve remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# release the (read-only) transaction
	conn.rollback()
	return True
1048 #------------------------------------------------------------------------
def file2bytea(query=None, filename=None, args=None, conn=None):
	"""Store data from a file into a bytea field.

	The query must:
	- be in unicode
	- contain a format spec identifying the row (eg a primary key)
	  matching <args> if it is an UPDATE
	- contain a format spec %(data)s::bytea

	<conn>
		- if None a new read-write connection is created
		  for the insert and closed again afterwards
	"""
	# read data from file
	infile = open(filename, 'rb')		# open() rather than the deprecated file() builtin
	try:
		data_as_byte_string = infile.read()
	finally:
		# close the file even if reading raises
		infile.close()
	if args is None:
		args = {}
	# wrap in buffer() so psycopg2 transmits it as binary rather than text
	args['data'] = buffer(data_as_byte_string)
	del(data_as_byte_string)

	# insert the data
	if conn is None:
		conn = get_raw_connection(readonly=False)
		close_conn = True
	else:
		close_conn = False

	try:
		run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)
	finally:
		# only close connections we created ourselves,
		# and do so even if the query raised
		if close_conn:
			conn.close()

	return
1080 #------------------------------------------------------------------------
def sanitize_pg_regex(expression=None, escape_all=False):
	"""Escape input for use in a PostgreSQL regular expression.

	If a fragment comes from user input and is to be used
	as a regular expression we need to make sure it doesn't
	contain invalid regex patterns such as unbalanced ('s.

	<escape_all>
		True: try to escape *all* metacharacters
		False: only escape those which render the regex invalid

	NOTE(review): <escape_all> is currently accepted but not acted
	upon -- both values escape the same character set. Confirm
	whether full escaping is still planned.
	"""
	escaped = expression
	# backslash-prefix each metacharacter which would
	# render the regex invalid ( ']' is not needed )
	for metachar in ('(', ')', '[', '+', '.', '*'):
		escaped = escaped.replace(metachar, '\\' + metachar)
	return escaped
1105 #']', '\]', # not needed 1106 #------------------------------------------------------------------------
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
	"""Run read-only queries.

	<link_obj>
		- a cursor: used as-is, no cleanup is done here
		- a connection: a cursor is created on it and closed again
		- None: a pooled read-only connection is used
	<queries> must be a list of dicts:
		[
			{'cmd': <string>, 'args': <dict> or <tuple>},
			{...},
			...
		]
	<return_data>
		- if True, fetchall() the rows of the last query
	<get_col_idx>
		- if True, also compute a column-name -> position mapping

	Returns a tuple (data, col_idx).
	"""
	# map the three kinds of link_obj onto a cursor plus the
	# appropriate cleanup actions (no-ops where the caller
	# retains ownership of the cursor/connection)
	if isinstance(link_obj, dbapi._psycopg.cursor):
		curs = link_obj
		curs_close = __noop
		tx_rollback = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		curs = link_obj.cursor()
		curs_close = curs.close
		tx_rollback = link_obj.rollback
	elif link_obj is None:
		conn = get_connection(readonly=True, verbose=verbose)
		curs = conn.cursor()
		curs_close = curs.close
		tx_rollback = conn.rollback
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	if verbose:
		_log.debug('cursor: %s', curs)

	for query in queries:
		# queries are expected to be unicode (psycopg2 type adaptation)
		if type(query['cmd']) is not types.UnicodeType:
			print "run_ro_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug('ran query: [%s]', curs.query)
				_log.debug('PG status message: %s', curs.statusmessage)
				_log.debug('cursor description: %s', str(curs.description))
		except:
			# FIXME: use .pgcode
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()		# need to rollback so ABORT state isn't preserved in pooled conns
			_log.error('query failed: [%s]', curs.query)
			_log.error('PG status message: %s', curs.statusmessage)
			raise

	data = None
	col_idx = None
	if return_data:
		data = curs.fetchall()
		if verbose:
			_log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
			_log.debug('cursor description: %s', str(curs.description))
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	tx_rollback()		# rollback just so that we don't stay IDLE IN TRANSACTION forever
	return (data, col_idx)
1174 #------------------------------------------------------------------------
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
	"""Convenience function for running a transaction
	that is supposed to get committed.

	<link_obj>
		can be either:
		- a cursor
		- a connection

	<queries>
		is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>)
		to be executed as a single transaction, the last
		query may usefully return rows (such as a
		"SELECT currval('some_sequence')" statement)

	<end_tx>
		- controls whether the transaction is finalized (eg.
		  committed/rolled back) or not, this allows the
		  call to run_rw_queries() to be part of a framing
		  transaction
		- if link_obj is a connection then <end_tx> will
		  default to False unless it is explicitly set to
		  True which is taken to mean "yes, you do have full
		  control over the transaction" in which case the
		  transaction is properly finalized
		- if link_obj is a cursor we CANNOT finalize the
		  transaction because we would need the connection for that
		- if link_obj is None <end_tx> will, of course, always be True

	<return_data>
		- if true, the returned data will include the rows
		  the last query selected
		- if false, it returns None instead

	<get_col_idx>
		- if true, the returned data will include a dictionary
		  mapping field names to column positions
		- if false, the returned data returns None instead

	method result:
		- returns a tuple (data, idx)
		- <data>:
			* (None, None) if last query did not return rows
			* ("fetchall() result", <index>) if last query returned any rows
			* for <index> see <get_col_idx>
	"""
	# map the three kinds of link_obj onto a cursor plus
	# commit/rollback/close actions (no-ops where the caller
	# retains control over the transaction)
	if isinstance(link_obj, dbapi._psycopg.cursor):
		conn_close = __noop
		conn_commit = __noop
		conn_rollback = __noop
		curs = link_obj
		curs_close = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		conn_close = __noop
		if end_tx:
			conn_commit = link_obj.commit
			conn_rollback = link_obj.rollback
		else:
			conn_commit = __noop
			conn_rollback = __noop
		curs = link_obj.cursor()
		curs_close = curs.close
	elif link_obj is None:
		conn = get_connection(readonly=False)
		conn_close = conn.close
		conn_commit = conn.commit
		conn_rollback = conn.rollback
		curs = conn.cursor()
		curs_close = curs.close
	else:
		raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

	for query in queries:
		# queries are expected to be unicode (psycopg2 type adaptation)
		if type(query['cmd']) is not types.UnicodeType:
			print "run_rw_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
		except:
			_log.exception('error running RW query')
			gmLog2.log_stack_trace()
			# best-effort cleanup before re-raising the query error
			try:
				curs_close()
				conn_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise

	data = None
	col_idx = None
	if return_data:
		try:
			data = curs.fetchall()
		except:
			_log.exception('error fetching data from RW query')
			gmLog2.log_stack_trace()
			# best-effort cleanup before re-raising the fetch error
			try:
				curs_close()
				conn_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	conn_commit()
	conn_close()

	return (data, col_idx)
1293 #------------------------------------------------------------------------
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
	"""Generates SQL for an INSERT query.

	values: dict of values keyed by field to insert them into
	"""
	if schema is None:
		schema = u'public'

	# derive placeholders from the same key list so fields
	# and value snippets stay in the same order
	fields = values.keys()
	val_snippets = [ u'%%(%s)s' % field for field in fields ]

	if returning is None:
		returning = u''
		return_data = False
	else:
		returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
		return_data = True

	cmd = u'\nINSERT INTO %s.%s (\n\t\t%s\n\t) VALUES (\n\t\t%s\n\t)%s' % (
		schema,
		table,
		u',\n\t\t'.join(fields),
		u',\n\t\t'.join(val_snippets),
		returning
	)

	_log.debug(u'running SQL: >>>%s<<<', cmd)

	return run_rw_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': values}],
		end_tx = end_tx,
		return_data = return_data,
		get_col_idx = get_col_idx,
		verbose = verbose
	)
1336 # ======================================================================= 1337 # connection handling API 1338 # -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
	"""
	GNUmed database connection pool.

	Extends psycopg2's PersistentConnectionPool with
	a custom _connect() function. Supports one connection
	per thread - which also ties it to one particular DSN.
	"""
	#--------------------------------------------------
	def _connect(self, key=None):
		"""Create a decorated read-only connection and register it with the pool."""
		conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)

		# guard against accidental close() of pooled read-only
		# connections: keep the real close() around as
		# original_close() and make close() raise instead
		conn.original_close = conn.close
		conn.close = _raise_exception_on_ro_conn_close

		# NOTE(review): pool bookkeeping below mirrors
		# psycopg2.pool internals (_used, _rused, _pool) --
		# private attributes, may break on psycopg2 upgrades
		if key is not None:
			self._used[key] = conn
			self._rused[id(conn)] = key
		else:
			self._pool.append(conn)

		return conn
	#--------------------------------------------------
	def shutdown(self):
		"""Really close all pooled connections via their original close()."""
		for conn_key in self._used.keys():
			_log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
			self._used[conn_key].original_close()
1367 # -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
	"""Get a raw, unadorned connection.

	- this will not set any parameters such as encoding, timezone, datestyle
	- the only requirement is a valid DSN
	- hence it can be used for "service" connections
	  for verifying encodings etc
	"""
	# FIXME: support verbose
	if dsn is None:
		dsn = get_default_dsn()

	if u'host=salaam.homeunix' in dsn:
		raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

	try:
		conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
	except dbapi.OperationalError, e:

		t, v, tb = sys.exc_info()
		try:
			msg = e.args[0]
		except (AttributeError, IndexError, TypeError):
			raise

		msg = unicode(msg, gmI18N.get_encoding(), 'replace')

		# translate typical authentication failures into
		# cAuthenticationError, re-raised with the original traceback
		if msg.find('fe_sendauth') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		if regex.search('user ".*" does not exist', msg) is not None:
			raise cAuthenticationError, (dsn, msg), tb

		# matches "...authenti..." regardless of capitalization
		if msg.find('uthenti') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		raise

	_log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

	# do first-time stuff: detect and log the server version once per process
	global postgresql_version
	if postgresql_version is None:
		curs = conn.cursor()
		curs.execute ("""
			SELECT
				(split_part(setting, '.', 1) || '.' || split_part(setting, '.', 2))::numeric as version
			from pg_settings
			where name='server_version'"""
		)
		postgresql_version = curs.fetchone()['version']
		_log.info('PostgreSQL version (numeric): %s' % postgresql_version)
		try:
			curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
			_log.info('database size: %s', curs.fetchone()[0])
		except:
			# best effort only -- size logging is not essential
			pass
		if verbose:
			__log_PG_settings(curs=curs)
		curs.close()
		conn.commit()

	if _default_client_timezone is None:
		__detect_client_timezone(conn = conn)

	curs = conn.cursor()

	# set access mode: make both the session characteristics and
	# the default for future transactions match <readonly>
	if readonly:
		_log.debug('access mode [READ ONLY]')
		cmd = 'set session characteristics as transaction READ ONLY'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to on'
		curs.execute(cmd)
	else:
		_log.debug('access mode [READ WRITE]')
		cmd = 'set session characteristics as transaction READ WRITE'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to off'
		curs.execute(cmd)

	curs.close()
	conn.commit()

	# not yet decorated with encoding/timezone/isolation level -- see get_connection()
	conn.is_decorated = False

	return conn
1455 # =======================================================================
def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
	"""Get a new connection.

	This assumes the locale system has been initialized
	unless an encoding is specified.
	"""
	# FIXME: support pooled on RW, too
	# FIXME: for now, support the default DSN only
	if pooled and readonly and (dsn is None):
		global __ro_conn_pool
		if __ro_conn_pool is None:
			__ro_conn_pool = cConnectionPool (
				minconn = 1,
				maxconn = 2,
				dsn = dsn,
				verbose = verbose
			)
		conn = __ro_conn_pool.getconn()
	else:
		# NOTE(review): readonly=False is passed here even when the
		# caller requested readonly=True (non-pooled / custom-DSN
		# case) -- confirm this is intentional
		conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)

	# already decorated (pooled connections are decorated once only)
	if conn.is_decorated:
		return conn

	if encoding is None:
		encoding = _default_client_encoding
	if encoding is None:
		encoding = gmI18N.get_encoding()
		_log.warning('client encoding not specified')
		_log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
		_log.warning('for this to work properly the application MUST have called locale.setlocale() before')

	# set connection properties
	# - client encoding
	try:
		conn.set_client_encoding(encoding)
	except dbapi.OperationalError:
		t, v, tb = sys.exc_info()
		if str(v).find("can't set encoding to") != -1:
			raise cEncodingError, (encoding, v), tb
		raise

	# - transaction isolation level
	if readonly:
		iso_level = u'read committed'
	else:
		conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
		iso_level = u'serializable'

	_log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)

	curs = conn.cursor()

	# - client time zone
	curs.execute(_sql_set_timezone, [_default_client_timezone])

	conn.commit()

	# FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
	# FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
	#
	# - bytea data format
	# PG 9.0 switched to - by default - using "hex" rather than "escape",
	# however, psycopg2's linked with a pre-9.0 libpq do assume "escape"
	# as the transmission mode for bytea output,
	# so try to set this setting back to "escape",
	# if that's not possible the reason will be that PG < 9.0 does not support
	# that setting - which also means we don't need it and can ignore the
	# failure
	cmd = "set bytea_output to 'escape'"
	try:
		curs.execute(cmd)
	except dbapi.ProgrammingError:
		_log.error('cannot set bytea_output format')

	curs.close()
	conn.commit()

	# mark as fully decorated so we do not repeat the above
	conn.is_decorated = True

	return conn
1537 #-----------------------------------------------------------------------
def shutdown():
	"""Close all pooled read-only connections, if a pool was ever created."""
	if __ro_conn_pool is not None:
		__ro_conn_pool.shutdown()
1542 # ====================================================================== 1543 # internal helpers 1544 #-----------------------------------------------------------------------
def __noop():
	"""Do nothing.

	Stand-in for close/commit/rollback when the caller
	retains ownership of a cursor or connection.
	"""
	return None
1547 #-----------------------------------------------------------------------
def _raise_exception_on_ro_conn_close():
	"""Installed as close() on pooled read-only connections to veto closing them."""
	msg = u'close() called on read-only connection'
	raise TypeError(msg)
1550 #-----------------------------------------------------------------------
def log_database_access(action=None):
	"""Record <action> in the gm.access_log audit table."""
	audit_row = {u'user_action': action}
	run_insert (
		schema = u'gm',
		table = u'access_log',
		values = audit_row,
		end_tx = True
	)
1558 #-----------------------------------------------------------------------
def sanity_check_time_skew(tolerance=60):
	"""Check server time and local time to be within
	the given tolerance of each other.

	tolerance: seconds
	"""
	_log.debug('maximum skew tolerance (seconds): %s', tolerance)

	conn = get_raw_connection(readonly=True)
	curs = conn.cursor()

	# time the roundtrip and sample both clocks in UTC
	cmd = u"SELECT now() at time zone 'UTC'"
	start = time.time()
	rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
	end = time.time()
	client_now_as_utc = pydt.datetime.utcnow()

	curs.close()
	conn.commit()

	server_now_as_utc = rows[0][0]
	query_duration = end - start
	_log.info('server "now" (UTC): %s', server_now_as_utc)
	_log.info('client "now" (UTC): %s', client_now_as_utc)
	_log.debug('wire roundtrip (seconds): %s', query_duration)

	if query_duration > tolerance:
		_log.error('useless to check client/server time skew, wire roundtrip > tolerance')
		return False

	# absolute difference, regardless of which clock is ahead
	real_skew = abs(server_now_as_utc - client_now_as_utc)

	_log.debug('client/server time skew: %s', real_skew)

	if real_skew > pydt.timedelta(seconds = tolerance):
		_log.error('client/server time skew > tolerance')
		return False

	return True
1601 #-----------------------------------------------------------------------
def sanity_check_database_settings():
	"""Checks database settings.

	returns (status, message)
	status:
		0: no problem
		1: non-fatal problem
		2: fatal problem
	"""
	_log.debug('checking database settings')

	conn = get_connection()

	# - version string (cached per process)
	global postgresql_version_string
	if postgresql_version_string is None:
		curs = conn.cursor()
		curs.execute('SELECT version()')
		postgresql_version_string = curs.fetchone()['version']
		curs.close()
		_log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

	options2check = {
		# setting: [expected value, risk, fatal?]
		# fatal?: True = fatal problem, False = non-fatal, None = report only
		u'allow_system_table_mods': [u'off', u'system breakage', False],
		u'check_function_bodies': [u'on', u'suboptimal error detection', False],
		u'datestyle': [u'ISO', u'faulty timestamp parsing', True],
		u'default_transaction_isolation': [u'read committed', u'faulty database reads', True],
		u'default_transaction_read_only': [u'on', u'accidental database writes', False],
		u'fsync': [u'on', u'data loss/corruption', True],
		u'full_page_writes': [u'on', u'data loss/corruption', False],
		u'lc_messages': [u'C', u'suboptimal error detection', False],
		u'password_encryption': [u'on', u'breach of confidentiality', False],
		u'regex_flavor': [u'advanced', u'query breakage', False],	# 9.0 doesn't support this anymore, default now advanced anyway
		u'synchronous_commit': [u'on', u'data loss/corruption', False],
		u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
	}

	from Gnumed.pycommon import gmCfg2
	_cfg = gmCfg2.gmCfgData()
	if _cfg.get(option = u'hipaa'):
		# HIPAA mode: connection logging is a hard requirement
		options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
		options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
	else:
		# fatal? = None: report the deviation but neither warn nor fail
		options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
		options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

	cmd = u"SELECT name, setting from pg_settings where name in %(settings)s"
	rows, idx = run_ro_queries (
		link_obj = conn,
		queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}],
		get_col_idx = False
	)

	found_error = False
	found_problem = False
	msg = []
	for row in rows:
		option = row['name']
		value_found = row['setting']
		value_expected = options2check[option][0]
		risk = options2check[option][1]
		fatal_setting = options2check[option][2]
		if value_found != value_expected:
			if fatal_setting is True:
				found_error = True
			elif fatal_setting is False:
				found_problem = True
			elif fatal_setting is None:
				# report only, do not change status
				pass
			else:
				_log.error(options2check[option])
				raise ValueError(u'invalid database configuration sanity check')
			msg.append(_(' option [%s]: %s') % (option, value_found))
			msg.append(_(' risk: %s') % risk)
			_log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk))

	if found_error:
		return 2, u'\n'.join(msg)

	if found_problem:
		return 1, u'\n'.join(msg)

	return 0, u''
1686 #------------------------------------------------------------------------
def __log_PG_settings(curs=None):
	"""Dump all PostgreSQL runtime settings to the log.

	Executes >>>show all<<< on <curs> directly rather than via
	any of the run_*() helpers since those might create a loop
	if we fail here.

	Returns True on success, False otherwise.
	"""
	# FIXME: use pg_settings
	try:
		curs.execute(u'show all')
	except Exception:		# was a bare <except:> - do not swallow SystemExit/KeyboardInterrupt
		_log.exception(u'cannot log PG settings (>>>show all<<< failed)')
		return False
	settings = curs.fetchall()
	if settings is None:
		_log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
		return False
	for setting in settings:
		_log.debug(u'PG option [%s]: %s', setting[0], setting[1])
	return True
1703 # =======================================================================
def extract_msg_from_pg_exception(exc=None):
	"""Return the first exception argument decoded to unicode.

	Falls back to a fixed message if <exc> carries no usable args.
	"""
	try:
		raw_msg = exc.args[0]
	except (AttributeError, IndexError, TypeError):
		return u'cannot extract message from exception'

	# decode with replacement so undecodable bytes cannot raise
	return unicode(raw_msg, gmI18N.get_encoding(), 'replace')
1712 # =======================================================================
class cAuthenticationError(dbapi.OperationalError):
	"""Raised when connecting fails for (suspected) authentication reasons.

	Carries the DSN used and the original PostgreSQL error message.
	"""

	def __init__(self, dsn=None, prev_val=None):
		# dsn: the DSN the failed connection attempt used
		# prev_val: the original error message from PostgreSQL
		self.dsn = dsn
		self.prev_val = prev_val

	def __str__(self):
		# logs on each str() call to help trace where the error surfaces
		_log.warning('%s.__str__() called', self.__class__.__name__)
		tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
		_log.error(tmp)
		# encode to the client locale since __str__ must return bytes in Python 2
		return tmp.encode(gmI18N.get_encoding(), 'replace')

	def __unicode__(self):
		return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
1727 1728 # ======================================================================= 1729 # custom psycopg2 extensions 1730 # =======================================================================
class cEncodingError(dbapi.OperationalError):
	"""Raised when the client encoding cannot be set on a connection.

	Carries the rejected encoding and the original error message.
	"""

	def __init__(self, encoding=None, prev_val=None):
		# encoding: the client encoding PostgreSQL rejected
		# prev_val: the original error from psycopg2/PostgreSQL
		self.encoding = encoding
		self.prev_val = prev_val

	def __str__(self):
		# logs on each str() call to help trace where the error surfaces
		_log.warning('%s.__str__() called', self.__class__.__name__)
		# encode to the client locale since __str__ must return bytes in Python 2
		return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))

	def __unicode__(self):
		return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
1743 1744 # ----------------------------------------------------------------------- 1745 # Python -> PostgreSQL 1746 # ----------------------------------------------------------------------- 1747 # test when Squeeze (and thus psycopg2 2.2 becomes Stable
class cAdapterPyDateTime(object):
	"""psycopg2 adapter: quote datetime.datetime values as PG timestamps.

	Registered via psycopg2.extensions.register_adapter() at module load.
	Rejects naive datetimes since those cannot be mapped to a PG
	"timestamp with time zone" unambiguously.
	"""

	def __init__(self, dt):
		if dt.tzinfo is None:
			# NOTE(review): the message is built by applying
			# _timestamp_template to the ISO string -- looks odd for an
			# error message, confirm this is intended
			raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
		self.__dt = dt

	def getquoted(self):
		# _timestamp_template is defined elsewhere in this module
		return _timestamp_template % self.__dt.isoformat()
## remove for 0.9
## ----------------------------------------------------------------------
##class cAdapterMxDateTime(object):
##
##	def __init__(self, dt):
##		if dt.tz == '???':
##			_log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
##		self.__dt = dt
##
##	def getquoted(self):
##		# under some locale settings the mx.DateTime ISO formatter
##		# will insert "," into the ISO string,
##		# while this is allowed per the ISO8601 spec PostgreSQL
##		# cannot currently handle that,
##		# so map those "," to "." to make things work:
##		return mxDT.ISO.str(self.__dt).replace(',', '.')
##
## ----------------------------------------------------------------------
## PostgreSQL -> Python
## ----------------------------------------------------------------------

#=======================================================================
# main
#-----------------------------------------------------------------------

# Make sure psycopg2 returns database strings as unicode objects.
# This is intended to become the psycopg2 default eventually;
# re-test when psycopg2 2.2 becomes Stable (Debian Squeeze).
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# Tell psycopg2 how to adapt Python datetime values into SQL timestamp
# literals even when locale settings are active (see cAdapterPyDateTime).
# Re-check whether this is still needed in 0.9:
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

## remove for 0.9
#try:
#	import mx.DateTime as mxDT
##	psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
#except ImportError:
#	_log.warning('cannot import mx.DateTime')

# do NOT adapt *lists* to "... IN (*) ..." syntax because we want
# them adapted to "... ARRAY[]..." so we can support PG arrays

#=======================================================================
# Test harness: only runs when invoked directly as
#   python gmPG2.py test [args...]
if __name__ == "__main__":

	if len(sys.argv) < 2:
		sys.exit()

	if sys.argv[1] != 'test':
		sys.exit()

	logging.basicConfig(level=logging.DEBUG)
	#--------------------------------------------------------------------
1814 - def test_file2bytea():
1815 run_rw_queries(queries = [ 1816 {'cmd': u'create table test_bytea (data bytea)'} 1817 ]) 1818 1819 cmd = u'insert into test_bytea values (%(data)s::bytea)' 1820 try: 1821 file2bytea(query = cmd, filename = sys.argv[2]) 1822 except: 1823 _log.exception('error') 1824 1825 run_rw_queries(queries = [ 1826 {'cmd': u'drop table test_bytea'} 1827 ])
1828 #--------------------------------------------------------------------
1829 - def test_get_connection():
1830 print "testing get_connection()" 1831 1832 dsn = 'foo' 1833 try: 1834 conn = get_connection(dsn=dsn) 1835 except dbapi.OperationalError, e: 1836 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1837 t, v = sys.exc_info()[:2] 1838 print ' ', t 1839 print ' ', v 1840 1841 dsn = 'dbname=gnumed_v9' 1842 try: 1843 conn = get_connection(dsn=dsn) 1844 except cAuthenticationError: 1845 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1846 t, v = sys.exc_info()[:2] 1847 print ' ', t 1848 print ' ', v 1849 1850 dsn = 'dbname=gnumed_v9 user=abc' 1851 try: 1852 conn = get_connection(dsn=dsn) 1853 except cAuthenticationError: 1854 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1855 t, v = sys.exc_info()[:2] 1856 print ' ', t 1857 print ' ', v 1858 1859 dsn = 'dbname=gnumed_v9 user=any-doc' 1860 try: 1861 conn = get_connection(dsn=dsn) 1862 except cAuthenticationError: 1863 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1864 t, v = sys.exc_info()[:2] 1865 print ' ', t 1866 print ' ', v 1867 1868 dsn = 'dbname=gnumed_v9 user=any-doc password=abc' 1869 try: 1870 conn = get_connection(dsn=dsn) 1871 except cAuthenticationError: 1872 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1873 t, v = sys.exc_info()[:2] 1874 print ' ', t 1875 print ' ', v 1876 1877 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1878 conn = get_connection(dsn=dsn, readonly=True) 1879 1880 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1881 conn = get_connection(dsn=dsn, readonly=False) 1882 1883 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1884 encoding = 'foo' 1885 try: 1886 conn = get_connection(dsn=dsn, encoding=encoding) 1887 except cEncodingError: 1888 print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding) 1889 t, v = sys.exc_info()[:2] 1890 print ' ', t 1891 print ' ', v
1892 #--------------------------------------------------------------------
1893 - def test_exceptions():
1894 print "testing exceptions" 1895 1896 try: 1897 raise cAuthenticationError('no dsn', 'no previous exception') 1898 except cAuthenticationError: 1899 t, v, tb = sys.exc_info() 1900 print t 1901 print v 1902 print tb 1903 1904 try: 1905 raise cEncodingError('no dsn', 'no previous exception') 1906 except cEncodingError: 1907 t, v, tb = sys.exc_info() 1908 print t 1909 print v 1910 print tb
1911 #--------------------------------------------------------------------
1912 - def test_ro_queries():
1913 print "testing run_ro_queries()" 1914 1915 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1916 conn = get_connection(dsn, readonly=True) 1917 1918 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1919 print data 1920 print idx 1921 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True) 1922 print data 1923 print idx 1924 1925 curs = conn.cursor() 1926 1927 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1928 print data 1929 print idx 1930 1931 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True) 1932 print data 1933 print idx 1934 1935 try: 1936 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True) 1937 print data 1938 print idx 1939 except psycopg2.ProgrammingError: 1940 print 'SUCCESS: run_ro_queries("selec 1") failed as expected' 1941 t, v = sys.exc_info()[:2] 1942 print ' ', t 1943 print ' ', v 1944 1945 curs.close()
1946 #--------------------------------------------------------------------
1947 - def test_request_dsn():
1948 conn = get_connection() 1949 print conn 1950 conn.close()
1951 #--------------------------------------------------------------------
1952 - def test_set_encoding():
1953 print "testing set_default_client_encoding()" 1954 1955 enc = 'foo' 1956 try: 1957 set_default_client_encoding(enc) 1958 print "SUCCESS: encoding [%s] worked" % enc 1959 except ValueError: 1960 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1961 t, v = sys.exc_info()[:2] 1962 print ' ', t 1963 print ' ', v 1964 1965 enc = '' 1966 try: 1967 set_default_client_encoding(enc) 1968 print "SUCCESS: encoding [%s] worked" % enc 1969 except ValueError: 1970 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1971 t, v = sys.exc_info()[:2] 1972 print ' ', t 1973 print ' ', v 1974 1975 enc = 'latin1' 1976 try: 1977 set_default_client_encoding(enc) 1978 print "SUCCESS: encoding [%s] worked" % enc 1979 except ValueError: 1980 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1981 t, v = sys.exc_info()[:2] 1982 print ' ', t 1983 print ' ', v 1984 1985 enc = 'utf8' 1986 try: 1987 set_default_client_encoding(enc) 1988 print "SUCCESS: encoding [%s] worked" % enc 1989 except ValueError: 1990 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1991 t, v = sys.exc_info()[:2] 1992 print ' ', t 1993 print ' ', v 1994 1995 enc = 'unicode' 1996 try: 1997 set_default_client_encoding(enc) 1998 print "SUCCESS: encoding [%s] worked" % enc 1999 except ValueError: 2000 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2001 t, v = sys.exc_info()[:2] 2002 print ' ', t 2003 print ' ', v 2004 2005 enc = 'UNICODE' 2006 try: 2007 set_default_client_encoding(enc) 2008 print "SUCCESS: encoding [%s] worked" % enc 2009 except ValueError: 2010 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2011 t, v = sys.exc_info()[:2] 2012 print ' ', t 2013 print ' ', v
2014 #--------------------------------------------------------------------
2015 - def test_connection_pool():
2016 dsn = get_default_dsn() 2017 pool = cConnectionPool(minconn=1, maxconn=2, dsn=None, verbose=False) 2018 print pool 2019 print pool.getconn() 2020 print pool.getconn() 2021 print pool.getconn() 2022 print type(pool.getconn())
2023 #--------------------------------------------------------------------
2024 - def test_list_args():
2025 dsn = get_default_dsn() 2026 conn = get_connection(dsn, readonly=True) 2027 curs = conn.cursor() 2028 curs.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])
2029 #--------------------------------------------------------------------
2030 - def test_sanitize_pg_regex():
2031 tests = [ 2032 ['(', '\\('] 2033 , ['[', '\\['] 2034 , [')', '\\)'] 2035 ] 2036 for test in tests: 2037 result = sanitize_pg_regex(test[0]) 2038 if result != test[1]: 2039 print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
2040 #--------------------------------------------------------------------
2041 - def test_is_pg_interval():
2042 status = True 2043 tests = [ 2044 [None, True], # None == NULL == succeeds ! 2045 [1, True], 2046 ['1', True], 2047 ['abc', False] 2048 ] 2049 2050 if not is_pg_interval(): 2051 print 'ERROR: is_pg_interval() returned "False", expected "True"' 2052 status = False 2053 2054 for test in tests: 2055 result = is_pg_interval(test[0]) 2056 if result != test[1]: 2057 print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]) 2058 status = False 2059 2060 return status
2061 #--------------------------------------------------------------------
	def test_sanity_check_time_skew():
		"""Run the client/server clock-skew sanity check (result goes to log/stdout only)."""
		sanity_check_time_skew()
2064 #--------------------------------------------------------------------
	def test_keyword_expansion():
		"""Print text-expansion keywords twice (DB fetch, then cache) and expand u'$dvt'."""
		print "keywords, from database:"
		print get_text_expansion_keywords()
		# second call is expected to be served from the cache
		print "keywords, cached:"
		print get_text_expansion_keywords()
		print "'$keyword' expands to:"
		print expand_keyword(keyword = u'$dvt')
2072 #--------------------------------------------------------------------
2073 - def test_get_foreign_key_details():
2074 for row in get_foreign_keys2column ( 2075 schema = u'dem', 2076 table = u'identity', 2077 column = u'pk' 2078 ): 2079 print '%s.%s references %s.%s.%s' % ( 2080 row['referencing_table'], 2081 row['referencing_column'], 2082 row['referenced_schema'], 2083 row['referenced_table'], 2084 row['referenced_column'] 2085 )
2086 #--------------------------------------------------------------------
2087 - def test_set_user_language():
2088 # (user, language, result, exception type) 2089 tests = [ 2090 # current user 2091 [None, 'de_DE', True], 2092 [None, 'lang_w/o_tx', False], 2093 [None, None, True], 2094 # valid user 2095 ['any-doc', 'de_DE', True], 2096 ['any-doc', 'lang_w/o_tx', False], 2097 ['any-doc', None, True], 2098 # invalid user 2099 ['invalid user', 'de_DE', None], 2100 ['invalid user', 'lang_w/o_tx', False], # lang checking happens before user checking 2101 ['invalid user', None, True] 2102 ] 2103 for test in tests: 2104 try: 2105 result = set_user_language(user = test[0], language = test[1]) 2106 if result != test[2]: 2107 print "test:", test 2108 print "result:", result, "expected:", test[2] 2109 except psycopg2.IntegrityError, e: 2110 if test[2] is None: 2111 continue 2112 print "test:", test 2113 print "expected exception" 2114 print "result:", e
2115 #--------------------------------------------------------------------
2116 - def test_get_schema_revision_history():
2117 for line in get_schema_revision_history(): 2118 print u' - '.join(line)
2119 #--------------------------------------------------------------------
2120 - def test_run_query():
2121 gmDateTime.init() 2122 args = {'dt': gmDateTime.pydt_max_here()} 2123 cmd = u"SELECT %(dt)s" 2124 2125 #cmd = u"SELECT 'infinity'::timestamp with time zone" 2126 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False) 2127 print rows
2128 #--------------------------------------------------------------------
2129 - def test_schema_exists():
2130 print schema_exists()

	#--------------------------------------------------------------------
	# run tests - uncomment the test(s) to execute
	#test_file2bytea()
	#test_get_connection()
	#test_exceptions()
	#test_ro_queries()
	#test_request_dsn()
	#test_set_encoding()
	#test_connection_pool()
	#test_list_args()
	#test_sanitize_pg_regex()
	#test_is_pg_interval()
	#test_sanity_check_time_skew()
	#test_keyword_expansion()
	#test_get_foreign_key_details()
	#test_set_user_language()
	#test_get_schema_revision_history()
	#test_run_query()
	test_schema_exists()

# ======================================================================