Package Gnumed :: Package pycommon :: Module gmPG2
[frames] | [no frames]

Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
  13  # ======================================================================= 
  14  __version__ = "$Revision: 1.127 $" 
  15  __author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>" 
  16  __license__ = 'GPL v2 or later (details at http://www.gnu.org)' 
  17   
  18  ### imports ### 
  19  # stdlib 
  20  import time, locale, sys, re as regex, os, codecs, types, datetime as pydt, logging 
  21   
  22   
  23  # GNUmed 
  24  if __name__ == '__main__': 
  25          sys.path.insert(0, '../../') 
  26  from Gnumed.pycommon import gmLoginInfo, gmExceptions, gmDateTime, gmBorg, gmI18N, gmLog2 
  27  from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character 
  28   
  29  _log = logging.getLogger('gm.db') 
  30  _log.info(__version__) 
  31   
  32   
  33  # 3rd party 
  34  try: 
  35          import psycopg2 as dbapi 
  36  except ImportError: 
  37          _log.exception("Python database adapter psycopg2 not found.") 
  38          print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server." 
  39          raise 
  40  ### imports ### 
  41   
  42   
# ---------------------------------------------------------------------------
# Sanity-check the psycopg2 DB-API adapter at import time. Each unmet
# requirement aborts module import with ImportError so the client fails
# early instead of misbehaving later.
_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
# DB-API compliance level 2.0 or better is required
if not (float(dbapi.apilevel) >= 2.0):
    raise ImportError('gmPG2: supported DB-API level too low')
# threads must at least be allowed to share the module (threadsafety > 0)
if not (dbapi.threadsafety > 0):
    raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
# all queries in this module use named "%(name)s" (pyformat) placeholders
if not (dbapi.paramstyle == 'pyformat'):
    raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
# psycopg2 advertises compiled-in features as tokens in its version
# string, eg "2.0.6 (dt dec pq3 ext)"; .index() raises ValueError
# when a token is missing
try:
    dbapi.__version__.index('dt')               # datetime support
except ValueError:
    raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
    dbapi.__version__.index('ext')              # extensions support
except ValueError:
    raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
    dbapi.__version__.index('pq3')              # v3 wire protocol
except ValueError:
    raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')
  63   
  64  import psycopg2.extras 
  65  import psycopg2.extensions 
  66  import psycopg2.pool 
  67  import psycopg2.errorcodes as sql_error_codes 
  68   
# =======================================================================
# module-level defaults and static data
# =======================================================================
# client (wire) encoding assumed for connections until set explicitly
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# things timezone
_default_client_timezone = None                 # default time zone for connections
_sql_set_timezone = None                        # SQL statement used to apply the default time zone
_timestamp_template = "cast('%s' as timestamp with time zone)"          # MUST NOT be unicode or else getquoted will not work
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone      # re-exported for importers

# default connection parameters, lazily filled in via request_login_params()
_default_dsn = None
_default_login = None

# server version, detected on first connect
postgresql_version_string = None
postgresql_version = None                       # accuracy: major.minor

# pool of read-only connections
__ro_conn_pool = None

# whether login parameters may be requested interactively when missing
auto_request_login_params = True
# =======================================================================
# global data
# =======================================================================

# expected MD5 hash of gm.concat_table_structure() for each released
# database schema version
known_schema_hashes = {
    0: 'not released, testing only',
    2: 'b09d50d7ed3f91ddf4c4ddb8ea507720',
    3: 'e73718eaf230d8f1d2d01afa8462e176',
    4: '4428ccf2e54c289136819e701bb095ea',
    5: '7e7b093af57aea48c288e76632a382e5',  # ... old (v1) style hashes
    6: '90e2026ac2efd236da9c8608b8685b2d',  # new (v2) style hashes ...
    7: '6c9f6d3981483f8e9433df99d1947b27',
    8: '89b13a7af83337c3aad153b717e52360',
    9: '641a9b2be3c378ffc2bb2f0b1c9f051d',
    10: '7ef42a8fb2bd929a2cdd0c63864b4e8a',
    11: '03042ae24f3f92877d986fb0a6184d76',
    12: '06183a6616db62257e22814007a8ed07',
    13: 'fab7c1ae408a6530c47f9b5111a0841e',
    14: 'e170d543f067d1ea60bfe9076b1560cf',
    15: '70012ff960b77ecdff4981c94b5b55b6',
    16: '0bcf44ca22c479b52976e5eda1de8161'
}

# reverse mapping: schema hash -> schema version number
map_schema_hash2version = {
    'b09d50d7ed3f91ddf4c4ddb8ea507720': 2,
    'e73718eaf230d8f1d2d01afa8462e176': 3,
    '4428ccf2e54c289136819e701bb095ea': 4,
    '7e7b093af57aea48c288e76632a382e5': 5,
    '90e2026ac2efd236da9c8608b8685b2d': 6,
    '6c9f6d3981483f8e9433df99d1947b27': 7,
    '89b13a7af83337c3aad153b717e52360': 8,
    '641a9b2be3c378ffc2bb2f0b1c9f051d': 9,
    '7ef42a8fb2bd929a2cdd0c63864b4e8a': 10,
    '03042ae24f3f92877d986fb0a6184d76': 11,
    '06183a6616db62257e22814007a8ed07': 12,
    'fab7c1ae408a6530c47f9b5111a0841e': 13,
    'e170d543f067d1ea60bfe9076b1560cf': 14,
    '70012ff960b77ecdff4981c94b5b55b6': 15,
    '0bcf44ca22c479b52976e5eda1de8161': 16
}

# client release branch -> minimum database schema version it requires
map_client_branch2required_db_version = {
    u'GIT tree': 0,
    u'0.3': 9,
    u'0.4': 10,
    u'0.5': 11,
    u'0.6': 12,
    u'0.7': 13,
    u'0.8': 14,
    u'0.9': 15,
    u'1.0': 16,
    u'1.1': 16
}

# get columns and data types for a given table
query_table_col_defs = u"""select
    cols.column_name,
    cols.udt_name
from
    information_schema.columns cols
where
    cols.table_schema = %s
        and
    cols.table_name = %s
order by
    cols.ordinal_position"""

# get column names (only) for a given table
query_table_attributes = u"""select
    cols.column_name
from
    information_schema.columns cols
where
    cols.table_schema = %s
        and
    cols.table_name = %s
order by
    cols.ordinal_position"""
 165   
 166  # ======================================================================= 
 167  # module globals API 
 168  # ======================================================================= 
def set_default_client_encoding(encoding = None):
    """Make <encoding> the default client (wire) encoding for new connections.

    Both psycopg2 and Python's codecs machinery must know the encoding
    before it is accepted. Returns True on success.

    Raises ValueError (unknown to psycopg2) or LookupError (unknown to Python).
    """
    # psycopg2 must be able to map this wire encoding to a Python codec name ...
    try:
        python_codec = psycopg2.extensions.encodings[encoding]
    except KeyError:
        raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
    # ... and Python itself must actually provide that codec
    try:
        codecs.lookup(python_codec)
    except LookupError:
        _log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, python_codec))
        raise
    # FIXME: check encoding against the database
    # FIXME: - but we may not yet have access
    # FIXME: - psycopg2 will pull its encodings from the database eventually
    # it seems safe to set it
    global _default_client_encoding
    _log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
    _default_client_encoding = encoding
    return True
188 #---------------------------------------------------
def set_default_client_timezone(timezone = None):
    """Remember <timezone> as the default time zone for new connections.

    FIXME: candidate should be run through __validate_timezone() first.
    Returns True.
    """
    global _default_client_timezone
    global _sql_set_timezone
    _log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
    _default_client_timezone = timezone
    _sql_set_timezone = u'set timezone to %s'
    return True
200 #---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):
    """Check whether <timezone> is both settable and usable on <conn>.

    Runs inside a throwaway transaction which is rolled back afterwards,
    so the connection's session state is left untouched.

    Returns True/False.
    """
    _log.debug(u'validating time zone [%s]', timezone)

    cmd = u'set timezone to %(tz)s'
    args = {u'tz': timezone}

    # start from a clean transaction
    conn.commit()
    curs = conn.cursor()
    is_valid = False
    try:
        curs.execute(cmd, args)
        _log.info(u'time zone [%s] is settable', timezone)
        # can we actually use it, though ?
        cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
        try:
            curs.execute(cmd)
            curs.fetchone()
            _log.info(u'time zone [%s] is usable', timezone)
            is_valid = True
        except:
            _log.error('error using time zone [%s]', timezone)
    except dbapi.DataError:
        # PostgreSQL rejected the zone name outright
        _log.warning(u'time zone [%s] is not settable', timezone)
    except:
        _log.error(u'failed to set time zone to [%s]', timezone)
        _log.exception(u'')

    curs.close()
    # discard whatever the probes did
    conn.rollback()

    return is_valid
233 #---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
    """some timezone defs are abbreviations so try to expand
    them because "set time zone" doesn't take abbreviations"""

    cmd = u"""
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
    args = {u'tz': timezone}

    # probe inside a clean transaction, discarded afterwards
    conn.commit()
    curs = conn.cursor()

    expanded = timezone
    try:
        curs.execute(cmd, args)
        matches = curs.fetchall()
        if len(matches) > 0:
            expanded = matches[0][0]
            _log.debug(u'[%s] maps to [%s]', timezone, expanded)
    except:
        _log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

    curs.close()
    conn.rollback()

    return expanded
265 #---------------------------------------------------
def __detect_client_timezone(conn=None):
    """This is run on the very first connection.

    Collects candidate time zone names ($TZ, the locally detected zone,
    and their expansions), validates them against the server, and stores
    the first workable one in the module-level defaults.
    """

    # FIXME: check whether server.timezone is the same
    # FIXME: value as what we eventually detect

    # we need gmDateTime to be initialized
    if gmDateTime.current_local_iso_numeric_timezone_string is None:
        gmDateTime.init()

    _log.debug('trying to detect timezone from system')

    tz_candidates = []
    try:
        # $TZ, if set, takes precedence
        tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
        tz_candidates.append(tz)
        expanded = __expand_timezone(conn = conn, timezone = tz)
        if expanded != tz:
            tz_candidates.append(expanded)
    except KeyError:
        pass

    tz_candidates.append(gmDateTime.current_local_timezone_name)
    expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
    if expanded != gmDateTime.current_local_timezone_name:
        tz_candidates.append(expanded)

    _log.debug('candidates: %s', str(tz_candidates))

    # find best among candidates
    global _default_client_timezone
    global _sql_set_timezone
    found = False
    for tz in tz_candidates:
        if __validate_timezone(conn = conn, timezone = tz):
            _default_client_timezone = tz
            _sql_set_timezone = u'set timezone to %s'
            found = True
            break

    if not found:
        # fall back to a numeric ISO offset, which is always settable
        _default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
        _sql_set_timezone = u"set time zone interval %s hour to minute"

    _log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
311 # ======================================================================= 312 # login API 313 # =======================================================================
def __request_login_params_tui():
    """Text mode request of database login parameters.

    Interactively prompts on the console and returns a filled-in
    gmLoginInfo.LoginInfo instance.

    Raises gmExceptions.ConnectionError when the user cancels with Ctrl-C.
    """
    import getpass
    login = gmLoginInfo.LoginInfo()

    print "\nPlease enter the required login parameters:"
    try:
        login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
        login.database = prompted_input(prompt = "database", default = 'gnumed_v16')
        login.user = prompted_input(prompt = "user name", default = '')
        tmp = 'password for "%s" (not shown): ' % login.user
        # getpass suppresses echo of the password
        login.password = getpass.getpass(tmp)
        login.port = prompted_input(prompt = "port", default = 5432)
    except KeyboardInterrupt:
        _log.warning("user cancelled text mode login dialog")
        print "user cancelled text mode login dialog"
        raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

    return login
333 #---------------------------------------------------
def __request_login_params_gui_wx():
    """GUI (wx) input request for database login parameters.

    Returns gmLoginInfo.LoginInfo object

    Raises gmExceptions.NoGuiError when no wx.App instance exists yet and
    gmExceptions.ConnectionError when the user cancels the dialog.
    """
    import wx
    # OK, wxPython was already loaded. But has the main Application instance
    # been initialized yet ? if not, the exception will kick us out
    if wx.GetApp() is None:
        raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

    # Let's launch the login dialog
    # if wx was not initialized /no main App loop, an exception should be raised anyway
    import gmAuthWidgets
    dlg = gmAuthWidgets.cLoginDialog(None, -1)
    dlg.ShowModal()
    login = dlg.panel.GetLoginInfo()
    dlg.Destroy()

    #if user cancelled or something else went wrong, raise an exception
    if login is None:
        raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

    return login
358 #---------------------------------------------------
def request_login_params():
    """Request login parameters for database connection.

    Tries the wxPython GUI dialog when a display seems available,
    falling back to text mode otherwise.

    Raises Exception when interactive requests are disabled via
    auto_request_login_params.
    """
    # do we auto-request parameters at all ?
    if not auto_request_login_params:
        raise Exception('Cannot request login parameters.')

    # are we inside X ?
    # (if we aren't wxGTK will crash hard at
    #  C-level with "can't open Display")
    # note: dict.has_key() was removed in Python 3, "in" works everywhere
    if 'DISPLAY' in os.environ:
        # try wxPython GUI
        try:
            return __request_login_params_gui_wx()
        except Exception:
            # fall through to text mode on ANY GUI failure, but -- unlike
            # the previous bare except -- let KeyboardInterrupt/SystemExit
            # propagate
            pass

    # well, either we are on the console or
    # wxPython does not work, use text mode
    return __request_login_params_tui()
376 377 # ======================================================================= 378 # DSN API 379 # -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
    """Assemble a libpq/psycopg2 DSN string from the given parts.

    Parts that are None or blank are omitted; 'sslmode=prefer' is
    always appended. Returns the space-separated "key=value" string.
    """
    # (key, value) in the order libpq conventionally lists them;
    # port is stringified so the blank-check below works uniformly
    fields = [
        ('dbname', database),
        ('host', host),
        ('port', None if port is None else str(port)),
        ('user', user),
        ('password', password),
    ]
    pairs = ['%s=%s' % (key, value) for key, value in fields
             if (value is not None) and (value.strip() != '')]
    pairs.append('sslmode=prefer')
    return ' '.join(pairs)
401 # ------------------------------------------------------
def get_default_login():
    """Return the default login, requesting one interactively if needed."""
    # establishing the default DSN also ensures a default login exists
    get_default_dsn()
    return _default_login
406 # ------------------------------------------------------
def get_default_dsn():
    """Return the default DSN, requesting login parameters on first use."""
    global _default_dsn
    if _default_dsn is None:
        # no DSN yet -- ask the user and derive one
        set_default_login(login = request_login_params())
    return _default_dsn
416 # ------------------------------------------------------
def set_default_login(login=None):
    """Install <login> as the default login and derive the default DSN.

    An empty host is normalized to None (= non-TCP/IP connection).
    Returns False when <login> is None, True otherwise.
    """
    if login is None:
        return False

    # normalize empty host to "use local socket"
    if login.host is not None:
        if login.host.strip() == u'':
            login.host = None

    global _default_login
    # log BEFORE overwriting _default_login -- the original code assigned
    # first, which made old and new value identical in the log line
    _log.info('setting default login from [%s] to [%s]' % (_default_login, login))
    _default_login = login

    dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

    global _default_dsn
    if _default_dsn is None:
        old_dsn = u'None'
    else:
        # never log cleartext passwords
        old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
    _log.info (
        'setting default DSN from [%s] to [%s]',
        old_dsn,
        regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
    )
    _default_dsn = dsn

    return True
443 # ======================================================================= 444 # metadata API 445 # =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
    """Check whether the connected database carries schema <version>.

    Compares the MD5 hash of gm.concat_table_structure() against the
    known hash for <version>; on mismatch optionally logs a schema dump.

    Returns True/False.
    """
    expected_hash = known_schema_hashes[version]
    # version 0 = development tree; its structure is hashed as pseudo version 9999
    if version == 0:
        args = {'ver': 9999}
    else:
        args = {'ver': version}
    rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [{
            'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
            'args': args
        }]
    )
    if rows[0]['md5'] != expected_hash:
        _log.error('database schema version mismatch')
        _log.error('expected: %s (%s)' % (version, expected_hash))
        _log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
        if verbose:
            _log.debug('schema dump follows:')
            # NOTE(review): .split() splits on ANY whitespace, so this logs one
            # token per log line rather than one schema line per log line --
            # .split('\n') may have been intended; confirm before changing
            for line in get_schema_structure(link_obj=link_obj).split():
                _log.debug(line)
            _log.debug('schema revision history dump follows:')
            for line in get_schema_revision_history(link_obj=link_obj):
                _log.debug(u' - '.join(line))
        return False
    _log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
    return True
473 #------------------------------------------------------------------------
def get_schema_version(link_obj=None):
    """Map the database's schema hash to its version number.

    Returns the version number, or a diagnostic string for unknown hashes.
    """
    rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
    md5_sum = rows[0]['md5']
    if md5_sum in map_schema_hash2version:
        return map_schema_hash2version[md5_sum]
    return u'unknown database schema version, MD5 hash is [%s]' % md5_sum
480 #------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
    """Return the concatenated table structure of the database as text."""
    structure_rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [{'cmd': u'select gm.concat_table_structure()'}]
    )
    return structure_rows[0][0]
484 #------------------------------------------------------------------------
def get_schema_hash(link_obj=None):
    """Return the MD5 hash of the current database schema structure."""
    hash_rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}]
    )
    return hash_rows[0]['md5']
488 #------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):
    """List all applied schema revisions, oldest first.

    Returns rows of (imported timestamp as text, version, filename).
    """
    cmd = u"""
select
	imported::text,
	version,
	filename
from gm.schema_revision
order by imported
"""
    history_rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}])
    return history_rows
500 #------------------------------------------------------------------------
def get_current_user():
    """Return the database account this client is connected as."""
    user_rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
    return user_rows[0][0]
504 #------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
    """Get the foreign keys pointing to schema.table.column.

    Does not properly work with multi-column FKs.
    GNUmed doesn't use any, however.

    Returns rows describing each referencing constraint (referencing
    table/column and the referenced column list) from pg_constraint.
    """
    # for each FK constraint (contype = 'f') on the target table, check
    # whether the target column's attnum appears in the referenced key
    cmd = u"""
select
	%(schema)s as referenced_schema,
	%(tbl)s as referenced_table,
	%(col)s as referenced_column,
	pgc.confkey as referenced_column_list,
	pgc.conrelid::regclass as referencing_table,
	pgc.conkey as referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
	pg_constraint pgc
where
	pgc.contype = 'f'
		and
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""

    args = {
        'schema': schema,
        'tbl': table,
        'col': column
    }

    rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [
            {'cmd': cmd, 'args': args}
        ]
    )

    return rows
556 #------------------------------------------------------------------------
def get_child_tables(schema='public', table=None, link_obj=None):
    """Return child tables of <table>."""
    # all tables inheriting (pg_inherits) from <schema>.<table>
    cmd = u"""
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
    args = {'schema': schema, 'table': table}
    child_rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
    return child_rows
578 #------------------------------------------------------------------------
def schema_exists(link_obj=None, schema=u'gm'):
    """Return whether <schema> exists in the connected database."""
    cmd = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
    exists_rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [{'cmd': cmd, 'args': {'schema': schema}}]
    )
    return exists_rows[0][0]
584 #------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
    """Returns false, true."""
    # only BASE TABLEs count, views etc are excluded
    cmd = u"""
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
    exists_rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [{'cmd': cmd, 'args': (schema, table)}]
    )
    return exists_rows[0][0]
597 #------------------------------------------------------------------------
def get_col_indices(cursor = None):
    """Map column names of the last query on <cursor> to positional indices.

    A query like "select 1,2;" returns two columns of the same name;
    duplicates are disambiguated by appending their index (note that
    dict-style row access will not work on such results).

    Returns {column name: index}, or None when the cursor holds no
    result description (unused cursor or non-SELECT statement).
    """
    if cursor.description is None:
        _log.error('no result description available: unused cursor or last query did not select rows')
        return None
    col_indices = {}
    # enumerate() replaces the manual counter; "in" replaces dict.has_key(),
    # which was removed in Python 3 (both work in Python 2 as well)
    for col_index, col_desc in enumerate(cursor.description):
        col_name = col_desc[0]
        if col_name in col_indices:
            col_name = '%s_%s' % (col_name, col_index)
        col_indices[col_name] = col_index

    return col_indices
615 #------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
    """Return [column names, {column name: data type}] for <schema>.<table>."""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
    names = []
    types = {}
    for row in rows:
        names.append(row[0])
        udt = row[1]
        # a leading underscore in udt_name marks an array type
        if udt.startswith('_'):
            types[row[0]] = udt[1:] + '[]'
        else:
            types[row[0]] = udt
    return [names, types]
631 #------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
    """Return column attributes of table"""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
    return [ row[0] for row in rows ]
639 640 #------------------------------------------------------------------------ 641 # i18n functions 642 #------------------------------------------------------------------------
def export_translations_from_database(filename=None):
    """Dump all database string translations into <filename> as a psql script.

    The resulting file re-imports the translations via i18n.upd_tx() and
    is meant to be mailed to the GNUmed developers.

    Returns True.
    """
    tx_file = codecs.open(filename, 'wb', 'utf8')
    tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
    tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
    tx_file.write(u'-- - user language is set to [%s]\n\n' % get_current_user_language())
    tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
    tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
    # NOTE(review): this SET statement lacks a trailing ";" -- verify psql
    # actually executes it when the file is replayed
    tx_file.write(u'set default_transaction_read_only to off\n\n')
    tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

    cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
    rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
    for row in rows:
        # single quotes are escaped for the E'...' literals
        line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
            row['lang'].replace("'", "\\'"),
            row['orig'].replace("'", "\\'"),
            row['trans'].replace("'", "\\'")
        )
        tx_file.write(line)
    tx_file.write(u'\n')

    tx_file.write(u'\set ON_ERROR_STOP 1\n')
    tx_file.close()

    return True
668 #------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
    """Remove the translation of <original> for <language> from the database."""
    args = {'lang': language, 'orig': original}
    cmd = u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
    run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
    return True
674 675 #------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
    """Insert or update the translation of <original> for <language>.

    Returns the argument dict that was passed to the query.
    """
    args = {'lang': language, 'orig': original, 'trans': translation}
    cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
    run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False)
    return args
681 682 #------------------------------------------------------------------------
def get_translation_languages():
    """Return all languages for which translations exist in the database."""
    lang_rows, idx = run_ro_queries (
        queries = [{'cmd': u'select distinct lang from i18n.translations'}]
    )
    return [ row[0] for row in lang_rows ]
688 689 #------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):
    """Return translatable strings (and translations) from the database.

    language = None: all languages, untranslated keys included
    order_by = None: default ordering by (lang, orig)

    Returns the result rows, or None when nothing was found.
    """
    args = {'lang': language}
    _log.debug('language [%s]', language)

    # BUGFIX: the two branches were inverted -- order_by = None used to
    # produce u'ORDER BY None' while an explicit order_by was discarded
    if order_by is None:
        order_by = u'ORDER BY lang, orig'
    else:
        order_by = u'ORDER BY %s' % order_by

    if language is None:
        cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		NULL as lang,
		ik.orig,
		NULL AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		it.lang,
		it.orig,
		it.trans
	FROM
		i18n.translations it

)) as translatable_strings
%s""" % order_by
    else:
        cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		%%(lang)s as lang,
		ik.orig,
		i18n._(ik.orig, %%(lang)s) AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		%%(lang)s as lang,
		it.orig,
		i18n._(it.orig, %%(lang)s) AS trans
	FROM
		i18n.translations it

)) AS translatable_strings
%s""" % order_by

    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

    if rows is None:
        _log.error('no translatable strings found')
    else:
        _log.debug('%s translatable strings found', len(rows))

    return rows
761 762 #------------------------------------------------------------------------
def get_current_user_language():
    """Return the database language configured for the current user."""
    lang_rows, idx = run_ro_queries(queries = [{'cmd': u'select i18n.get_curr_lang()'}])
    return lang_rows[0][0]
767 768 #------------------------------------------------------------------------
def set_user_language(user=None, language=None):
    """Set the user language in the database.

    user = None: current db user
    language = None: unset
    """
    _log.info('setting database language for user [%s] to [%s]', user, language)

    args = {'usr': user, 'lang': language}

    if language is None:
        # unsetting cannot meaningfully fail, so append a query
        # returning True for the success check below
        if user is None:
            queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
        else:
            queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
        queries.append({'cmd': u'select True'})
    elif user is None:
        queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
    else:
        queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

    rows, idx = run_rw_queries(queries = queries, return_data = True)

    success = rows[0][0]
    if not success:
        _log.error('cannot set database language to [%s] for user [%s]', language, user)

    return success
800 #------------------------------------------------------------------------
def force_user_language(language=None):
    """Set the user language in the database.

    - regardless of whether there is any translation available.
    - only for the current user
    """
    _log.info('forcing database language for current db user to [%s]', language)

    queries = [{
        'cmd': u'select i18n.force_curr_lang(%(lang)s)',
        'args': {'lang': language}
    }]
    run_rw_queries(queries = queries)
813 #------------------------------------------------------------------------ 814 #------------------------------------------------------------------------ 815 text_expansion_keywords = None 816
817 -def get_text_expansion_keywords():
818 global text_expansion_keywords 819 if text_expansion_keywords is not None: 820 return text_expansion_keywords 821 822 cmd = u"""select keyword, public_expansion, private_expansion, owner from clin.v_keyword_expansions""" 823 rows, idx = run_ro_queries(queries = [{'cmd': cmd}]) 824 text_expansion_keywords = rows 825 826 _log.info('retrieved %s text expansion keywords', len(text_expansion_keywords)) 827 828 return text_expansion_keywords
829 #------------------------------------------------------------------------
def expand_keyword(keyword = None):
    """Return the current user's expansion for <keyword>, None if unknown."""

    # Easter Egg ;-)
    if keyword == u'$$steffi':
        return u'Hai, play ! Versucht das ! (Keks dazu ?) :-)'

    cmd = u"""select expansion from clin.v_your_keyword_expansions where keyword = %(kwd)s"""
    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])

    if not rows:
        return None
    return rows[0]['expansion']
843 #------------------------------------------------------------------------
def get_keyword_expansion_candidates(keyword = None):
    """Return all known expansion keywords starting with <keyword>."""
    if keyword is None:
        return []

    # make sure the module-level cache is populated
    get_text_expansion_keywords()

    return [
        entry['keyword']
        for entry in text_expansion_keywords
        if entry['keyword'].startswith(keyword)
    ]
857 #------------------------------------------------------------------------
def add_text_expansion(keyword=None, expansion=None, public=None):
    """Add a public or private text expansion for <keyword>.

    Returns False when an expansion of that visibility already exists,
    True otherwise. Invalidates the keyword cache.
    """
    # refuse duplicates within the requested visibility
    if public:
        cmd = u"SELECT 1 from clin.v_keyword_expansions where public_expansion is true and keyword = %(kwd)s"
    else:
        cmd = u"SELECT 1 from clin.v_your_keyword_expansions where private_expansion is true and keyword = %(kwd)s"
    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
    if len(rows) != 0:
        return False

    # public expansions carry no owner, private ones belong to the current staff member
    if public:
        cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, null)"""
    else:
        cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (SELECT pk from dem.staff where db_user = current_user))"""
    rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword, 'exp': expansion}}])

    global text_expansion_keywords
    text_expansion_keywords = None

    return True
884 #------------------------------------------------------------------------
def delete_text_expansion(keyword):
	"""Delete the expansion for <keyword> belonging to the current user.

	Public expansions (fk_staff NULL) are removed, too, provided the
	current database user is recorded as their owner.
	"""
	args = {'kwd': keyword}
	cmd = u"""
delete from clin.keyword_expansion where
	keyword = %(kwd)s and (
		(fk_staff = (SELECT pk from dem.staff where db_user = current_user))
			or
		(fk_staff is null and owner = current_user)
	)"""
	rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': args}])

	# invalidate the module level keyword cache
	global text_expansion_keywords
	text_expansion_keywords = None
897 #------------------------------------------------------------------------
def edit_text_expansion(keyword, expansion):
	"""Replace the current user's expansion for <keyword>.

	Implemented as DELETE-then-INSERT, both run inside a single
	transaction by run_rw_queries().
	"""
	args = {'kwd': keyword, 'exp': expansion}
	queries = [
		{'args': args, 'cmd': u"""
DELETE FROM clin.keyword_expansion
WHERE
	keyword = %(kwd)s
		AND
	fk_staff = (SELECT pk FROM dem.staff WHERE db_user = current_user)"""},
		{'args': args, 'cmd': u"""
INSERT INTO clin.keyword_expansion (
	keyword, expansion, fk_staff
) VALUES (
	%(kwd)s,
	%(exp)s,
	(SELECT pk FROM dem.staff WHERE db_user = current_user)
)"""}
	]
	rows, idx = run_rw_queries(queries = queries)

	# invalidate the module level keyword cache
	global text_expansion_keywords
	text_expansion_keywords = None
923 # ======================================================================= 924 # query runners and helpers 925 # =======================================================================
def send_maintenance_notification():
	"""NOTIFY all listening clients that database maintenance is imminent."""
	run_rw_queries (
		queries = [{'cmd': u'notify "db_maintenance_warning:"'}],
		return_data = False
	)
929 #------------------------------------------------------------------------
def send_maintenance_shutdown():
	"""NOTIFY all listening clients to disconnect for database maintenance."""
	run_rw_queries (
		queries = [{'cmd': u'notify "db_maintenance_disconnect:"'}],
		return_data = False
	)
933 #------------------------------------------------------------------------
def is_pg_interval(candidate=None):
	"""Check whether <candidate> is acceptable to PostgreSQL as an interval.

	Tries a direct cast to ::interval first, then a cast via ::text
	(needed for values PostgreSQL only accepts in string form).

	Returns True/False.
	"""
	# previously used bare "except:" which would also swallow
	# KeyboardInterrupt/SystemExit - only database errors mean
	# "not an interval", anything else should propagate
	cmd = u'SELECT %(candidate)s::interval'
	try:
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
		return True
	except dbapi.Error:
		cmd = u'SELECT %(candidate)s::text::interval'
		try:
			rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
			return True
		except dbapi.Error:
			return False
946 #------------------------------------------------------------------------
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Write the content of a bytea field into the file <filename>.

	Thin wrapper around bytea2file_object(), see there for the
	meaning of the arguments.

	Returns the result of bytea2file_object().
	"""
	outfile = open(filename, 'wb')
	# previously the file object leaked if the download raised -
	# make sure it is closed in all cases
	try:
		result = bytea2file_object (
			data_query = data_query,
			file_obj = outfile,
			chunk_size = chunk_size,
			data_size = data_size,
			data_size_query = data_size_query
		)
	finally:
		outfile.close()
	return result
952 #------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store data from a bytea field into a file.

	<data_query>
	- dict {'cmd': ..., 'args': ...}
	- 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
	- 'args' must be a dict
	- must return one row with one field of type bytea
	<file>
	- must be a file like Python object
	- NOTE: it is NOT closed by this function, the caller owns it
	<data_size>
	- integer of the total size of the expected data or None
	<data_size_query>
	- dict {'cmd': ..., 'args': ...}
	- cmd must be unicode
	- must return one row with one field with the octet_length() of the data field
	- used only when <data_size> is None

	Returns True on success, raises on retrieval errors.
	"""
	if data_size == 0:
		return True

	# If the client sets an encoding other than the default we
	# will receive encoding-parsed data which isn't the binary
	# content we want. Hence we need to get our own connection.
	# It must be a read-write one so that we don't affect the
	# encoding for other users of the shared read-only
	# connections.
	# Actually, encodings shouldn't be applied to binary data
	# (eg. bytea types) in the first place but that is only
	# reported to be fixed > v7.4.
	# further tests reveal that at least on PG 8.0 this bug still
	# manifests itself
	conn = get_raw_connection(readonly=True)

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
		if data_size in [None, 0]:
			# nothing to transfer
			conn.rollback()
			return True

	_log.debug('expecting bytea data of size: [%s] bytes' % data_size)
	_log.debug('using chunk size of: [%s] bytes' % chunk_size)

	# chunk size of 0 means "retrieve whole field at once"
	if chunk_size == 0:
		chunk_size = data_size
		_log.debug('chunk size [0] bytes: retrieving all data at once')

	# Windoze sucks: it can't transfer objects of arbitrary size,
	# anyways, we need to split the transfer,
	# however, only possible if postgres >= 7.2
	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('chunks to retrieve: [%s]' % needed_chunks)
	_log.debug('remainder to retrieve: [%s] bytes' % remainder)

	# try setting "bytea_output"
	# - fails if not necessary
	# - succeeds if necessary
	try:
		run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
	except dbapi.ProgrammingError:
		_log.debug('failed to set bytea_output to "escape", not necessary')

	# retrieve chunks, skipped if data size < chunk size,
	# does this not carry the danger of cutting up multi-byte escape sequences ?
	# no, since bytea is binary,
	# yes, since in bytea there are *some* escaped values, still
	# no, since those are only escaped during *transfer*, not on-disk, hence
	# only complete escape sequences are put on the wire
	for chunk_id in range(needed_chunks):
		# substring() on bytea is 1-based, hence the +1
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# retrieve remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# read-only work - rollback rather than commit
	conn.rollback()
	return True
1052 #------------------------------------------------------------------------
def file2bytea(query=None, filename=None, args=None, conn=None):
	"""Store data from a file into a bytea field.

	The query must:
	- be in unicode
	- contain a format spec identifying the row (eg a primary key)
	  matching <args> if it is an UPDATE
	- contain a format spec %(data)s::bytea

	<conn>
	- an existing read-write connection to use, a raw one is
	  created (and closed again) when None

	NOTE: the caller-supplied <args> dict is modified in place
	(key 'data' is added).
	"""
	# read data from file - previously the handle leaked if read() raised
	infile = open(filename, "rb")
	try:
		data_as_byte_string = infile.read()
	finally:
		infile.close()
	if args is None:
		args = {}
	# wrap in buffer() so psycopg2 adapts it as binary, not text
	args['data'] = buffer(data_as_byte_string)
	del(data_as_byte_string)

	# insert the data
	if conn is None:
		conn = get_raw_connection(readonly=False)
		close_conn = True
	else:
		close_conn = False

	run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)

	if close_conn:
		conn.close()

	return
1084 #------------------------------------------------------------------------
def sanitize_pg_regex(expression=None, escape_all=False):
	"""Escape input for use in a PostgreSQL regular expression.

	If a fragment comes from user input and is to be used
	as a regular expression we need to make sure it doesn't
	contain invalid regex patterns such as unbalanced ('s.

	<escape_all>
		True: try to escape *all* metacharacters
		False: only escape those which render the regex invalid
	"""
	# characters which can render a regex invalid when unbalanced/trailing
	# (']' need not be escaped as long as '[' is)
	char2escaped = {
		'(': '\\(',
		')': '\\)',
		'[': '\\[',
		'+': '\\+',
		'.': '\\.',
		'*': '\\*'
	}
	# previously <escape_all> was documented but silently ignored -
	# now additionally escape metacharacters which change the meaning
	# of the regex without making it invalid
	if escape_all:
		char2escaped.update({
			'\\': '\\\\',
			']': '\\]',
			'{': '\\{',
			'}': '\\}',
			'?': '\\?',
			'|': '\\|',
			'^': '\\^',
			'$': '\\$'
		})
	# single pass per character, so replacements cannot be re-escaped
	return ''.join(char2escaped.get(char, char) for char in expression)
1109 #']', '\]', # not needed 1110 #------------------------------------------------------------------------
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
	"""Run read-only queries.

	<link_obj>
	- a cursor, a connection, or None (a pooled read-only
	  connection is then obtained via get_connection())

	<queries> must be a list of dicts:
		[
			{'cmd': <string>, 'args': <dict> or <tuple>},
			{...},
			...
		]

	Returns a tuple (data, col_idx):
	- data: fetchall() result of the last query if <return_data>, else None
	- col_idx: field-name -> column-position mapping if <get_col_idx>, else None
	"""
	# pick cleanup callables depending on what was passed in -
	# for a caller-owned cursor/connection we must not close/rollback
	# more than the caller allows, so the extra steps become __noop
	if isinstance(link_obj, dbapi._psycopg.cursor):
		curs = link_obj
		curs_close = __noop
		tx_rollback = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		curs = link_obj.cursor()
		curs_close = curs.close
		tx_rollback = link_obj.rollback
	elif link_obj is None:
		conn = get_connection(readonly=True, verbose=verbose)
		curs = conn.cursor()
		curs_close = curs.close
		tx_rollback = conn.rollback
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	if verbose:
		_log.debug('cursor: %s', curs)

	for query in queries:
		# complain loudly about non-unicode queries, they break i18n
		if type(query['cmd']) is not types.UnicodeType:
			print "run_ro_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug('ran query: [%s]', curs.query)
				_log.debug('PG status message: %s', curs.statusmessage)
				_log.debug('cursor description: %s', str(curs.description))
		except:
			# FIXME: use .pgcode
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()		# need to rollback so ABORT state isn't preserved in pooled conns
			_log.error('query failed: [%s]', curs.query)
			_log.error('PG status message: %s', curs.statusmessage)
			raise

	data = None
	col_idx = None
	if return_data:
		data = curs.fetchall()
		if verbose:
			_log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
			_log.debug('cursor description: %s', str(curs.description))
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	tx_rollback()		# rollback just so that we don't stay IDLE IN TRANSACTION forever
	return (data, col_idx)
1178 #------------------------------------------------------------------------
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
	"""Convenience function for running a transaction
	that is supposed to get committed.

	<link_obj>
		can be either:
		- a cursor
		- a connection

	<queries>
		is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>)
		to be executed as a single transaction, the last
		query may usefully return rows (such as a
		"SELECT currval('some_sequence')" statement)

	<end_tx>
		- controls whether the transaction is finalized (eg.
		  committed/rolled back) or not, this allows the
		  call to run_rw_queries() to be part of a framing
		  transaction
		- if link_obj is a connection then <end_tx> will
		  default to False unless it is explicitly set to
		  True which is taken to mean "yes, you do have full
		  control over the transaction" in which case the
		  transaction is properly finalized
		- if link_obj is a cursor we CANNOT finalize the
		  transaction because we would need the connection for that
		- if link_obj is None <end_tx> will, of course, always be True

	<return_data>
		- if true, the returned data will include the rows
		  the last query selected
		- if false, it returns None instead

	<get_col_idx>
		- if true, the returned data will include a dictionary
		  mapping field names to column positions
		- if false, the returned data returns None instead

	method result:
		- returns a tuple (data, idx)
		- <data>:
			* (None, None) if last query did not return rows
			* ("fetchall() result", <index>) if last query returned any rows
			* for <index> see <get_col_idx>
	"""
	# pick commit/rollback/close callables depending on how much of
	# the transaction we are allowed to control - anything the caller
	# owns becomes a __noop here
	if isinstance(link_obj, dbapi._psycopg.cursor):
		conn_close = __noop
		conn_commit = __noop
		conn_rollback = __noop
		curs = link_obj
		curs_close = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		conn_close = __noop
		if end_tx:
			conn_commit = link_obj.commit
			conn_rollback = link_obj.rollback
		else:
			conn_commit = __noop
			conn_rollback = __noop
		curs = link_obj.cursor()
		curs_close = curs.close
	elif link_obj is None:
		conn = get_connection(readonly=False)
		conn_close = conn.close
		conn_commit = conn.commit
		conn_rollback = conn.rollback
		curs = conn.cursor()
		curs_close = curs.close
	else:
		raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

	for query in queries:
		# complain loudly about non-unicode queries, they break i18n
		if type(query['cmd']) is not types.UnicodeType:
			print "run_rw_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
		except:
			_log.exception('error running RW query')
			gmLog2.log_stack_trace()
			# best-effort cleanup before re-raising the original error
			try:
				curs_close()
				conn_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise

	data = None
	col_idx = None
	if return_data:
		try:
			data = curs.fetchall()
		except:
			_log.exception('error fetching data from RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				conn_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	conn_commit()
	conn_close()

	return (data, col_idx)
1297 #------------------------------------------------------------------------
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
	"""Generate and run SQL for an INSERT query.

	values: dict of values keyed by field to insert them into
	returning: list of field names to RETURN, or None

	Returns the (data, idx) tuple from run_rw_queries().
	"""
	if schema is None:
		schema = u'public'

	# a dict iterates its keys in a stable order within one call,
	# so placeholders and field names line up
	fields = values.keys()
	val_snippets = [ u'%%(%s)s' % field for field in fields ]

	return_data = returning is not None
	if return_data:
		returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
	else:
		returning = u''

	cmd = u"""\nINSERT INTO %s.%s (\n\t\t%s\n\t) VALUES (\n\t\t%s\n\t)%s""" % (
		schema,
		table,
		u',\n\t\t'.join(fields),
		u',\n\t\t'.join(val_snippets),
		returning
	)

	_log.debug(u'running SQL: >>>%s<<<', cmd)

	return run_rw_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': values}],
		end_tx = end_tx,
		return_data = return_data,
		get_col_idx = get_col_idx,
		verbose = verbose
	)
1340 # ======================================================================= 1341 # connection handling API 1342 # -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
	"""
	GNUmed database connection pool.

	Extends psycopg2's PersistentConnectionPool with
	a custom _connect() function. Supports one connection
	per thread - which also ties it to one particular DSN.
	"""
	#--------------------------------------------------
	def _connect(self, key=None):
		# called by the psycopg2 pool base class whenever a new
		# connection is needed (one per thread key)
		conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)

		# pooled read-only connections must not be close()d by
		# callers - keep the real close() around for shutdown()
		conn.original_close = conn.close
		conn.close = _raise_exception_on_ro_conn_close

		# NOTE(review): _used/_rused/_pool are psycopg2 pool internals;
		# this presumably mirrors PersistentConnectionPool._connect()
		# bookkeeping - confirm against the installed psycopg2 version
		if key is not None:
			self._used[key] = conn
			self._rused[id(conn)] = key
		else:
			self._pool.append(conn)

		return conn
	#--------------------------------------------------
	def shutdown(self):
		# really close all pooled connections (bypassing the
		# monkeypatched close()) - intended for application exit
		for conn_key in self._used.keys():
			_log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
			self._used[conn_key].original_close()
1371 # -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
	"""Get a raw, unadorned connection.

	- this will not set any parameters such as encoding, timezone, datestyle
	- the only requirement is a valid DSN
	- hence it can be used for "service" connections
	  for verifying encodings etc

	<readonly> sets the session-level default transaction access mode.

	Raises cAuthenticationError when the connect failure looks
	authentication related, re-raises the original error otherwise.
	"""
	# FIXME: support verbose
	if dsn is None:
		dsn = get_default_dsn()

	if u'host=salaam.homeunix' in dsn:
		raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

	try:
		conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
	except dbapi.OperationalError, e:

		t, v, tb = sys.exc_info()
		try:
			msg = e.args[0]
		except (AttributeError, IndexError, TypeError):
			raise

		msg = unicode(msg, gmI18N.get_encoding(), 'replace')

		# map the various authentication failure messages onto our
		# own exception type, preserving the original traceback
		if msg.find('fe_sendauth') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		if regex.search('user ".*" does not exist', msg) is not None:
			raise cAuthenticationError, (dsn, msg), tb

		if msg.find('uthenti') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		raise

	_log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

	# do first-time stuff: detect and cache the server version once
	global postgresql_version
	if postgresql_version is None:
		curs = conn.cursor()
		curs.execute("""
			SELECT
				substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
			FROM
				pg_settings
			WHERE
				name = 'server_version'
		""")
		postgresql_version = curs.fetchone()['version']
		_log.info('PostgreSQL version (numeric): %s' % postgresql_version)
		try:
			curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
			_log.info('database size: %s', curs.fetchone()[0])
		except:
			# purely informational - ignore failures
			pass
		if verbose:
			__log_PG_settings(curs=curs)
		curs.close()
		conn.commit()

	if _default_client_timezone is None:
		__detect_client_timezone(conn = conn)

	curs = conn.cursor()

	# set access mode
	if readonly:
		_log.debug('access mode [READ ONLY]')
		cmd = 'set session characteristics as transaction READ ONLY'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to on'
		curs.execute(cmd)
	else:
		_log.debug('access mode [READ WRITE]')
		cmd = 'set session characteristics as transaction READ WRITE'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to off'
		curs.execute(cmd)

	curs.close()
	conn.commit()

	# flag checked by get_connection() to avoid re-decorating
	conn.is_decorated = False

	return conn
1461 # =======================================================================
def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
	"""Get a new connection.

	This assumes the locale system has been initialized
	unless an encoding is specified.

	Decorates the raw connection with client encoding, isolation
	level, time zone and bytea output format, then marks it via
	conn.is_decorated so this is done only once per connection.
	"""
	# FIXME: support pooled on RW, too
	# FIXME: for now, support the default DSN only
	if pooled and readonly and (dsn is None):
		global __ro_conn_pool
		if __ro_conn_pool is None:
			__ro_conn_pool = cConnectionPool (
				minconn = 1,
				maxconn = 2,
				dsn = dsn,
				verbose = verbose
			)
		conn = __ro_conn_pool.getconn()
	else:
		# NOTE(review): readonly=False is hardcoded here, so a
		# readonly=True request with an explicit DSN (or pooled=False)
		# yields a READ WRITE raw connection - confirm this is intended
		conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)

	if conn.is_decorated:
		return conn

	if encoding is None:
		encoding = _default_client_encoding
	if encoding is None:
		encoding = gmI18N.get_encoding()
		_log.warning('client encoding not specified')
		_log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
		_log.warning('for this to work properly the application MUST have called locale.setlocale() before')

	# set connection properties
	# - client encoding
	try:
		conn.set_client_encoding(encoding)
	except dbapi.OperationalError:
		t, v, tb = sys.exc_info()
		if str(v).find("can't set encoding to") != -1:
			raise cEncodingError, (encoding, v), tb
		raise

	# - transaction isolation level
	if readonly:
		iso_level = u'read committed'
	else:
		conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
		iso_level = u'serializable'

	_log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)

	curs = conn.cursor()

	# - client time zone
	curs.execute(_sql_set_timezone, [_default_client_timezone])

	conn.commit()

	# FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
	# FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
	#
	# - bytea data format
	# PG 9.0 switched to - by default - using "hex" rather than "escape",
	# however, psycopg2's linked with a pre-9.0 libpq do assume "escape"
	# as the transmission mode for bytea output,
	# so try to set this setting back to "escape",
	# if that's not possible the reason will be that PG < 9.0 does not support
	# that setting - which also means we don't need it and can ignore the
	# failure
	cmd = "set bytea_output to 'escape'"
	try:
		curs.execute(cmd)
	except dbapi.ProgrammingError:
		_log.error('cannot set bytea_output format')

	curs.close()
	conn.commit()

	conn.is_decorated = True

	return conn
1543 #-----------------------------------------------------------------------
def shutdown():
	"""Close all pooled read-only connections, if a pool exists."""
	if __ro_conn_pool is not None:
		__ro_conn_pool.shutdown()
1548 # ====================================================================== 1549 # internal helpers 1550 #-----------------------------------------------------------------------
def __noop():
	"""Do-nothing stand-in for cursor/connection cleanup callables."""
	return None
1553 #-----------------------------------------------------------------------
def _raise_exception_on_ro_conn_close():
	"""Patched onto pooled read-only connections in place of close()."""
	raise TypeError(u'close() called on read-only connection')
1556 #-----------------------------------------------------------------------
def log_database_access(action=None):
	"""Record <action> in the gm.access_log audit table."""
	values = {u'user_action': action}
	run_insert(schema = u'gm', table = u'access_log', values = values, end_tx = True)
1564 #-----------------------------------------------------------------------
def sanity_check_time_skew(tolerance=60):
	"""Check server time and local time to be within
	the given tolerance of each other.

	tolerance: seconds

	Returns True when the skew is within tolerance, False when
	it is not or when the wire roundtrip itself exceeds tolerance.
	"""
	_log.debug('maximum skew tolerance (seconds): %s', tolerance)

	cmd = u"SELECT now() at time zone 'UTC'"
	conn = get_raw_connection(readonly=True)
	curs = conn.cursor()

	# bracket the query with local timestamps to bound the
	# measurement error by the wire roundtrip
	start = time.time()
	rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
	end = time.time()
	client_now_as_utc = pydt.datetime.utcnow()

	curs.close()
	conn.commit()

	server_now_as_utc = rows[0][0]
	query_duration = end - start
	_log.info('server "now" (UTC): %s', server_now_as_utc)
	_log.info('client "now" (UTC): %s', client_now_as_utc)
	_log.debug('wire roundtrip (seconds): %s', query_duration)

	if query_duration > tolerance:
		_log.error('useless to check client/server time skew, wire roundtrip > tolerance')
		return False

	# skew is reported as an absolute value
	if server_now_as_utc > client_now_as_utc:
		real_skew = server_now_as_utc - client_now_as_utc
	else:
		real_skew = client_now_as_utc - server_now_as_utc

	_log.debug('client/server time skew: %s', real_skew)

	if real_skew > pydt.timedelta(seconds = tolerance):
		_log.error('client/server time skew > tolerance')
		return False

	return True
1607 #-----------------------------------------------------------------------
def sanity_check_database_settings():
	"""Checks database settings.

	returns (status, message)
	status:
		0: no problem
		1: non-fatal problem
		2: fatal problem
	"""
	_log.debug('checking database settings')

	conn = get_connection()

	# - version string (cached at module level)
	global postgresql_version_string
	if postgresql_version_string is None:
		curs = conn.cursor()
		curs.execute('SELECT version()')
		postgresql_version_string = curs.fetchone()['version']
		curs.close()
		_log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

	options2check = {
		# setting: [expected value, risk, fatal?]
		# fatal? is tri-state: True = fatal, False = non-fatal, None = log only
		u'allow_system_table_mods': [u'off', u'system breakage', False],
		u'check_function_bodies': [u'on', u'suboptimal error detection', False],
		u'datestyle': [u'ISO', u'faulty timestamp parsing', True],
		u'default_transaction_isolation': [u'read committed', u'faulty database reads', True],
		u'default_transaction_read_only': [u'on', u'accidental database writes', False],
		u'fsync': [u'on', u'data loss/corruption', True],
		u'full_page_writes': [u'on', u'data loss/corruption', False],
		u'lc_messages': [u'C', u'suboptimal error detection', False],
		u'password_encryption': [u'on', u'breach of confidentiality', False],
		u'regex_flavor': [u'advanced', u'query breakage', False],	# 9.0 doesn't support this anymore, default now advanced anyway
		u'synchronous_commit': [u'on', u'data loss/corruption', False],
		u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
	}

	# HIPAA mode escalates the logging options from log-only to fatal
	from Gnumed.pycommon import gmCfg2
	_cfg = gmCfg2.gmCfgData()
	if _cfg.get(option = u'hipaa'):
		options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
		options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
	else:
		options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
		options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

	cmd = u"SELECT name, setting from pg_settings where name in %(settings)s"
	rows, idx = run_ro_queries (
		link_obj = conn,
		queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}],
		get_col_idx = False
	)

	found_error = False
	found_problem = False
	msg = []
	for row in rows:
		option = row['name']
		value_found = row['setting']
		value_expected = options2check[option][0]
		risk = options2check[option][1]
		fatal_setting = options2check[option][2]
		if value_found != value_expected:
			if fatal_setting is True:
				found_error = True
			elif fatal_setting is False:
				found_problem = True
			elif fatal_setting is None:
				# log-only option, do not report to the user
				pass
			else:
				_log.error(options2check[option])
				raise ValueError(u'invalid database configuration sanity check')
			msg.append(_(' option [%s]: %s') % (option, value_found))
			msg.append(_(' risk: %s') % risk)
			_log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk))

	if found_error:
		return 2, u'\n'.join(msg)

	if found_problem:
		return 1, u'\n'.join(msg)

	return 0, u''
1692 #------------------------------------------------------------------------
def __log_PG_settings(curs=None):
	"""Dump all PostgreSQL settings of the current session to the log.

	curs: an open cursor to use

	Returns True on success, False otherwise (logging is
	best-effort, failures are swallowed after being logged).
	"""
	# don't use any of the run_*()s since that might
	# create a loop if we fail here
	# FIXME: use pg_settings
	try:
		curs.execute(u'show all')
	# previously a bare "except:" - keep the best-effort semantics
	# but don't swallow KeyboardInterrupt/SystemExit
	except Exception:
		_log.exception(u'cannot log PG settings (>>>show all<<< failed)')
		return False
	settings = curs.fetchall()
	if settings is None:
		_log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
		return False
	for setting in settings:
		_log.debug(u'PG option [%s]: %s', setting[0], setting[1])
	return True
1709 # =======================================================================
def extract_msg_from_pg_exception(exc=None):
	"""Extract and decode the message carried by a PostgreSQL exception.

	Returns a unicode string, falling back to a generic message
	when <exc> carries no usable args.
	"""
	try:
		raw_msg = exc.args[0]
	except (AttributeError, IndexError, TypeError):
		return u'cannot extract message from exception'

	# decode using the client encoding, replacing undecodable bytes
	return unicode(raw_msg, gmI18N.get_encoding(), 'replace')
1718 # =======================================================================
class cAuthenticationError(dbapi.OperationalError):
	"""Raised when connecting to PostgreSQL fails for authentication reasons."""

	def __init__(self, dsn=None, prev_val=None):
		# keep the offending DSN and the original error message around
		self.dsn = dsn
		self.prev_val = prev_val

	def __str__(self):
		_log.warning('%s.__str__() called', self.__class__.__name__)
		msg = self.__unicode__()
		_log.error(msg)
		return msg.encode(gmI18N.get_encoding(), 'replace')

	def __unicode__(self):
		return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
1733 1734 # ======================================================================= 1735 # custom psycopg2 extensions 1736 # =======================================================================
class cEncodingError(dbapi.OperationalError):
	"""Raised when the requested client encoding cannot be set on a connection."""

	def __init__(self, encoding=None, prev_val=None):
		# keep the offending encoding and the original error around
		self.encoding = encoding
		self.prev_val = prev_val

	def __str__(self):
		_log.warning('%s.__str__() called', self.__class__.__name__)
		enc = gmI18N.get_encoding()
		return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(enc, 'replace'), self.encoding.encode(enc, 'replace'))

	def __unicode__(self):
		return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
1749 1750 # ----------------------------------------------------------------------- 1751 # Python -> PostgreSQL 1752 # ----------------------------------------------------------------------- 1753 # test when Squeeze (and thus psycopg2 2.2 becomes Stable
class cAdapterPyDateTime(object):
	"""psycopg2 adapter: quote datetime.datetime instances as SQL timestamps.

	Registered via psycopg2.extensions.register_adapter() at the
	bottom of this module.
	"""

	def __init__(self, dt):
		# only time zone aware datetimes are acceptable
		if dt.tzinfo is None:
			# NOTE: "%" chains left-to-right - the template is spliced
			# into the message first, then filled with the ISO string
			raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
		self.__dt = dt

	def getquoted(self):
		# the SQL literal psycopg2 will splice into the statement
		return _timestamp_template % self.__dt.isoformat()
# ----------------------------------------------------------------------
# PostgreSQL -> Python
# ----------------------------------------------------------------------

# =======================================================================
# main
# -----------------------------------------------------------------------

# Make sure psycopg2 knows how to handle unicode: return PG text as
# unicode objects. Intended to become psycopg2 standard behaviour;
# re-test once psycopg2 2.2 (Debian Squeeze) becomes Stable.
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# Tell psycopg2 how to adapt datetime values with timestamps when
# locales are in use. Check in 0.9.
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

# NOTE: the mx.DateTime adapter (cAdapterMxDateTime, registered via a
# guarded "import mx.DateTime") was commented out here - remove for 0.9.

# Do NOT adapt *lists* to "... IN (...)" syntax because we want them
# adapted to "... ARRAY[...] ..." so we can support PG arrays.

# =======================================================================
if __name__ == "__main__":

    # run only when explicitly invoked as a test: "python gmPG2.py test"
    if len(sys.argv) < 2:
        sys.exit()

    if sys.argv[1] != 'test':
        sys.exit()

    logging.basicConfig(level=logging.DEBUG)
def test_file2bytea():
    """Round-trip a file into a bytea column of a scratch table.

    Expects the file name in sys.argv[2]. The scratch table is dropped
    again even when the import itself fails.
    """
    run_rw_queries(queries = [
        {'cmd': u'create table test_bytea (data bytea)'}
    ])

    cmd = u'insert into test_bytea values (%(data)s::bytea)'
    try:
        file2bytea(query = cmd, filename = sys.argv[2])
    # was a bare "except:" which would also swallow SystemExit and
    # KeyboardInterrupt - narrow to Exception, log, then clean up
    except Exception:
        _log.exception('error')

    run_rw_queries(queries = [
        {'cmd': u'drop table test_bytea'}
    ])
1834 #--------------------------------------------------------------------
1835 - def test_get_connection():
1836 print "testing get_connection()" 1837 1838 dsn = 'foo' 1839 try: 1840 conn = get_connection(dsn=dsn) 1841 except dbapi.OperationalError, e: 1842 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1843 t, v = sys.exc_info()[:2] 1844 print ' ', t 1845 print ' ', v 1846 1847 dsn = 'dbname=gnumed_v9' 1848 try: 1849 conn = get_connection(dsn=dsn) 1850 except cAuthenticationError: 1851 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1852 t, v = sys.exc_info()[:2] 1853 print ' ', t 1854 print ' ', v 1855 1856 dsn = 'dbname=gnumed_v9 user=abc' 1857 try: 1858 conn = get_connection(dsn=dsn) 1859 except cAuthenticationError: 1860 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1861 t, v = sys.exc_info()[:2] 1862 print ' ', t 1863 print ' ', v 1864 1865 dsn = 'dbname=gnumed_v9 user=any-doc' 1866 try: 1867 conn = get_connection(dsn=dsn) 1868 except cAuthenticationError: 1869 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1870 t, v = sys.exc_info()[:2] 1871 print ' ', t 1872 print ' ', v 1873 1874 dsn = 'dbname=gnumed_v9 user=any-doc password=abc' 1875 try: 1876 conn = get_connection(dsn=dsn) 1877 except cAuthenticationError: 1878 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1879 t, v = sys.exc_info()[:2] 1880 print ' ', t 1881 print ' ', v 1882 1883 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1884 conn = get_connection(dsn=dsn, readonly=True) 1885 1886 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1887 conn = get_connection(dsn=dsn, readonly=False) 1888 1889 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1890 encoding = 'foo' 1891 try: 1892 conn = get_connection(dsn=dsn, encoding=encoding) 1893 except cEncodingError: 1894 print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding) 1895 t, v = sys.exc_info()[:2] 1896 print ' ', t 1897 print ' ', v
1898 #--------------------------------------------------------------------
1899 - def test_exceptions():
1900 print "testing exceptions" 1901 1902 try: 1903 raise cAuthenticationError('no dsn', 'no previous exception') 1904 except cAuthenticationError: 1905 t, v, tb = sys.exc_info() 1906 print t 1907 print v 1908 print tb 1909 1910 try: 1911 raise cEncodingError('no dsn', 'no previous exception') 1912 except cEncodingError: 1913 t, v, tb = sys.exc_info() 1914 print t 1915 print v 1916 print tb
1917 #--------------------------------------------------------------------
1918 - def test_ro_queries():
1919 print "testing run_ro_queries()" 1920 1921 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1922 conn = get_connection(dsn, readonly=True) 1923 1924 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1925 print data 1926 print idx 1927 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True) 1928 print data 1929 print idx 1930 1931 curs = conn.cursor() 1932 1933 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1934 print data 1935 print idx 1936 1937 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True) 1938 print data 1939 print idx 1940 1941 try: 1942 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True) 1943 print data 1944 print idx 1945 except psycopg2.ProgrammingError: 1946 print 'SUCCESS: run_ro_queries("selec 1") failed as expected' 1947 t, v = sys.exc_info()[:2] 1948 print ' ', t 1949 print ' ', v 1950 1951 curs.close()
1952 #--------------------------------------------------------------------
1953 - def test_request_dsn():
1954 conn = get_connection() 1955 print conn 1956 conn.close()
1957 #--------------------------------------------------------------------
1958 - def test_set_encoding():
1959 print "testing set_default_client_encoding()" 1960 1961 enc = 'foo' 1962 try: 1963 set_default_client_encoding(enc) 1964 print "SUCCESS: encoding [%s] worked" % enc 1965 except ValueError: 1966 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1967 t, v = sys.exc_info()[:2] 1968 print ' ', t 1969 print ' ', v 1970 1971 enc = '' 1972 try: 1973 set_default_client_encoding(enc) 1974 print "SUCCESS: encoding [%s] worked" % enc 1975 except ValueError: 1976 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1977 t, v = sys.exc_info()[:2] 1978 print ' ', t 1979 print ' ', v 1980 1981 enc = 'latin1' 1982 try: 1983 set_default_client_encoding(enc) 1984 print "SUCCESS: encoding [%s] worked" % enc 1985 except ValueError: 1986 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1987 t, v = sys.exc_info()[:2] 1988 print ' ', t 1989 print ' ', v 1990 1991 enc = 'utf8' 1992 try: 1993 set_default_client_encoding(enc) 1994 print "SUCCESS: encoding [%s] worked" % enc 1995 except ValueError: 1996 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1997 t, v = sys.exc_info()[:2] 1998 print ' ', t 1999 print ' ', v 2000 2001 enc = 'unicode' 2002 try: 2003 set_default_client_encoding(enc) 2004 print "SUCCESS: encoding [%s] worked" % enc 2005 except ValueError: 2006 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2007 t, v = sys.exc_info()[:2] 2008 print ' ', t 2009 print ' ', v 2010 2011 enc = 'UNICODE' 2012 try: 2013 set_default_client_encoding(enc) 2014 print "SUCCESS: encoding [%s] worked" % enc 2015 except ValueError: 2016 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2017 t, v = sys.exc_info()[:2] 2018 print ' ', t 2019 print ' ', v
2020 #--------------------------------------------------------------------
2021 - def test_connection_pool():
2022 dsn = get_default_dsn() 2023 pool = cConnectionPool(minconn=1, maxconn=2, dsn=None, verbose=False) 2024 print pool 2025 print pool.getconn() 2026 print pool.getconn() 2027 print pool.getconn() 2028 print type(pool.getconn())
2029 #--------------------------------------------------------------------
def test_list_args():
    """Verify that a list argument survives psycopg2 parameter passing."""
    dsn = get_default_dsn()
    conn = get_connection(dsn, readonly=True)
    cursor = conn.cursor()
    cursor.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])
2035 #--------------------------------------------------------------------
2036 - def test_sanitize_pg_regex():
2037 tests = [ 2038 ['(', '\\('] 2039 , ['[', '\\['] 2040 , [')', '\\)'] 2041 ] 2042 for test in tests: 2043 result = sanitize_pg_regex(test[0]) 2044 if result != test[1]: 2045 print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
2046 #--------------------------------------------------------------------
2047 - def test_is_pg_interval():
2048 status = True 2049 tests = [ 2050 [None, True], # None == NULL == succeeds ! 2051 [1, True], 2052 ['1', True], 2053 ['abc', False] 2054 ] 2055 2056 if not is_pg_interval(): 2057 print 'ERROR: is_pg_interval() returned "False", expected "True"' 2058 status = False 2059 2060 for test in tests: 2061 result = is_pg_interval(test[0]) 2062 if result != test[1]: 2063 print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]) 2064 status = False 2065 2066 return status
2067 #--------------------------------------------------------------------
def test_sanity_check_time_skew():
    """Compare the client wall clock against the database server's."""
    sanity_check_time_skew()
2070 #--------------------------------------------------------------------
2071 - def test_keyword_expansion():
2072 print "keywords, from database:" 2073 print get_text_expansion_keywords() 2074 print "keywords, cached:" 2075 print get_text_expansion_keywords() 2076 print "'$keyword' expands to:" 2077 print expand_keyword(keyword = u'$dvt')
2078 #--------------------------------------------------------------------
2079 - def test_get_foreign_key_details():
2080 for row in get_foreign_keys2column ( 2081 schema = u'dem', 2082 table = u'identity', 2083 column = u'pk' 2084 ): 2085 print '%s.%s references %s.%s.%s' % ( 2086 row['referencing_table'], 2087 row['referencing_column'], 2088 row['referenced_schema'], 2089 row['referenced_table'], 2090 row['referenced_column'] 2091 )
2092 #--------------------------------------------------------------------
2093 - def test_set_user_language():
2094 # (user, language, result, exception type) 2095 tests = [ 2096 # current user 2097 [None, 'de_DE', True], 2098 [None, 'lang_w/o_tx', False], 2099 [None, None, True], 2100 # valid user 2101 ['any-doc', 'de_DE', True], 2102 ['any-doc', 'lang_w/o_tx', False], 2103 ['any-doc', None, True], 2104 # invalid user 2105 ['invalid user', 'de_DE', None], 2106 ['invalid user', 'lang_w/o_tx', False], # lang checking happens before user checking 2107 ['invalid user', None, True] 2108 ] 2109 for test in tests: 2110 try: 2111 result = set_user_language(user = test[0], language = test[1]) 2112 if result != test[2]: 2113 print "test:", test 2114 print "result:", result, "expected:", test[2] 2115 except psycopg2.IntegrityError, e: 2116 if test[2] is None: 2117 continue 2118 print "test:", test 2119 print "expected exception" 2120 print "result:", e
2121 #--------------------------------------------------------------------
2122 - def test_get_schema_revision_history():
2123 for line in get_schema_revision_history(): 2124 print u' - '.join(line)
2125 #--------------------------------------------------------------------
2126 - def test_run_query():
2127 gmDateTime.init() 2128 args = {'dt': gmDateTime.pydt_max_here()} 2129 cmd = u"SELECT %(dt)s" 2130 2131 #cmd = u"SELECT 'infinity'::timestamp with time zone" 2132 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False) 2133 print rows
2134 #--------------------------------------------------------------------
2135 - def test_schema_exists():
2136 print schema_exists()
# --------------------------------------------------------------------
# run tests - uncomment the ones to execute
#test_file2bytea()
#test_get_connection()
#test_exceptions()
#test_ro_queries()
#test_request_dsn()
#test_set_encoding()
#test_connection_pool()
#test_list_args()
#test_sanitize_pg_regex()
#test_is_pg_interval()
#test_sanity_check_time_skew()
#test_keyword_expansion()
#test_get_foreign_key_details()
#test_set_user_language()
#test_get_schema_revision_history()
#test_run_query()
test_schema_exists()

# ======================================================================