1 """GNUmed database object business class.
2
3 Overview
4 --------
5 This class wraps a source relation (table, view) which
6 represents an entity that makes immediate business sense
7 such as a vaccination or a medical document. In many if
8 not most cases this source relation is a denormalizing
9 view. The data in that view will in most cases, however,
10 originate from several normalized tables. One instance
11 of this class represents one row of said source relation.
12
13 Note, however, that this class does not *always* simply
14 wrap a single table or view. It can also encompass several
15 relations (views, tables, sequences etc) that taken together
16 form an object meaningful to *business* logic.
17
18 Initialization
19 --------------
20 There are two ways to initialize an instance with values.
21 One way is to pass a "primary key equivalent" object into
22 __init__(). Refetch_payload() will then pull the data from
23 the backend. Another way would be to fetch the data outside
24 the instance and pass it in via the <row> argument. In that
25 case the instance will not initially connect to the databse
26 which may offer a great boost to performance.
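
A sketch of both paths (the class, query and column names are
purely illustrative):

    # 1) init by primary key: the payload is fetched from the backend
    obj = cChildClass(aPK_obj = 12)

    # 2) init from a pre-fetched row: no extra database roundtrip
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = True)
    obj = cChildClass(row = {'data': rows[0], 'idx': idx, 'pk_field': 'pk_col'})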

Values API
----------
Field values are cached for later access. They can be accessed
via a dictionary API, eg:

    old_value = object['field']
    object['field'] = new_value

The field names correspond to the respective column names
in the "main" source relation. Accessing non-existent field
names will raise an error, as does trying to set fields not
listed in self.__class__._updatable_fields. To actually
store updated values in the database one must explicitly
call save_payload().
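
A sketch of a typical update (the field name is illustrative):

    obj['comment'] = u'suture removed'
    successful, data = obj.save_payload()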

The class will in many cases be enhanced by accessors to
related data that is not directly part of the business
object itself but is closely related, such as codes
linked to a clinical narrative entry (eg a diagnosis). Such
accessors in most cases start with get_*. Related setters
start with set_*. The values can be accessed via the
object['field'] syntax, too, but they will be cached
independently.
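
For example (the names are purely illustrative), a child class
defining

    def get_codes(self):
        ...

can also be read as

    codes = entry['codes']

which invokes get_codes() and caches the result.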

Concurrency handling
--------------------
GNUmed connections always run transactions in isolation level
"serializable". This prevents transactions running at the
*very same time* from overwriting each other's data. All but one
of them will abort with a concurrency error (eg if a
transaction runs a select-for-update later than another one
it will hang until the first transaction ends. Then it will
succeed or fail depending on what the first transaction
did). This is standard transactional behaviour.

However, another transaction may have updated our row
between the time we first fetched the data and the time we
start the update transaction. This is detected by fetching the
XMIN system column for the row when initially fetching the
data and using that value in the WHERE condition when
updating the row later. If the row has been updated (xmin
changed) or deleted (primary key disappeared) in the
meantime, the update will touch zero rows (as no row with
both PK and XMIN matching is found) even though the query itself
succeeds syntactically.

When detecting a change in a row due to XMIN being different
one needs to be careful how to represent that to the user.
The row may simply have changed, but it also might have been
deleted and a completely new and unrelated row which happens
to have the same primary key might have been created! This
row might relate to a totally different context (eg patient,
episode, encounter).

One can offer all the data to the user:

self.original_payload
- contains the data at the last successful refetch

self.modified_payload
- contains the payload as it was just before the last
  failure of save_payload(), IOW the data we tried to store

self._payload
- contains the currently active payload which may or
  may not contain changes
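
A sketch of how a caller might present such a conflict
(illustrative only):

    successful, data = obj.save_payload()
    if not successful:
        # the row changed (or vanished) underneath us: show the
        # user what we tried to write on top of what we had read
        for field in obj.original_payload:
            if obj.original_payload[field] != obj.modified_payload[field]:
                print field, obj.original_payload[field], obj.modified_payload[field]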

For discussion on this see the thread starting at:

    http://archives.postgresql.org/pgsql-general/2004-10/msg01352.php

and here:

    http://groups.google.com/group/pgsql.general/browse_thread/thread/e3566ba76173d0bf/6cf3c243a86d9233
    (google for "XMIN semantic at peril")

Problem cases with XMIN:

1) not unlikely
- a very old row is read with XMIN
- vacuum comes along and sets XMIN to FrozenTransactionId
- now XMIN changed but the row actually didn't!
- an update with "... WHERE xmin = old_xmin ..." fails
  although there is no need to fail

2) quite unlikely
- a row is read with XMIN
- a long time passes
- the original XMIN gets frozen to FrozenTransactionId
- another writer comes along and changes the row
- incidentally the exact same old row gets the old XMIN *again*
- now XMIN is (again) the same but the data changed!
- a later update fails to detect the concurrent change!

TODO:
The solution is to use our own column for optimistic locking
which gets updated by an AFTER UPDATE trigger.
126 """
127
128 __author__ = "K.Hilbert <Karsten.Hilbert@gmx.net>"
129 __license__ = "GPL v2 or later"
130
131
132 import sys
133 import types
134 import inspect
135 import logging
136 import datetime
137
138
139 if __name__ == '__main__':
140 sys.path.insert(0, '../../')
141 from Gnumed.pycommon import gmExceptions
142 from Gnumed.pycommon import gmPG2
143 from Gnumed.pycommon.gmTools import tex_escape_string
144
145
146 _log = logging.getLogger('gm.db')
147
149 """Represents business objects in the database.
150
151 Rules:
152 - instances ARE ASSUMED TO EXIST in the database
153 - PK construction (aPK_obj): DOES verify its existence on instantiation
154 (fetching data fails)
155 - Row construction (row): allowed by using a dict of pairs
156 field name: field value (PERFORMANCE improvement)
157 - does NOT verify FK target existence
158 - does NOT create new entries in the database
159 - does NOT lazy-fetch fields on access
160
161 Class scope SQL commands and variables:
162
163 <_cmd_fetch_payload>
164 - must return exactly one row
165 - where clause argument values are expected
166 in self.pk_obj (taken from __init__(aPK_obj))
167 - must return xmin of all rows that _cmds_store_payload
168 will be updating, so views must support the xmin columns
169 of their underlying tables
170
171 <_cmds_store_payload>
172 - one or multiple "update ... set ... where xmin_* = ..." statements
173 which actually update the database from the data in self._payload,
174 - the last query must refetch at least the XMIN values needed to detect
175 concurrent updates, their field names had better be the same as
176 in _cmd_fetch_payload,
177 - when subclasses tend to live a while after save_payload() was
178 called and they support computed fields (say, _(some_column)
179 you need to return *all* columns (see cEncounter)
180
181 <_updatable_fields>
182 - a list of fields available for update via object['field']
183
184
    A template for new child classes:

    *********** start of template ***********

    #------------------------------------------------------------
    from Gnumed.pycommon import gmBusinessDBObject
    from Gnumed.pycommon import gmPG2

    #============================================================
    # short description
    #------------------------------------------------------------
    # use plural form, search-replace get_XXX
    _SQL_get_XXX = u\"""
        SELECT *, xmin AS xmin_XXX
        FROM XXX.v_XXX
        WHERE %s
    \"""

    class cXxxXxx(gmBusinessDBObject.cBusinessDBObject):
        \"""Represents ...\"""

        _cmd_fetch_payload = _SQL_get_XXX % u"pk_XXX = %s"
        _cmds_store_payload = [
            u\"""
            -- typically the underlying table name
            UPDATE xxx.xxx SET
                -- typically "table_col = %(view_col)s"
                xxx = %(xxx)s,
                xxx = gm.nullify_empty_string(%(xxx)s)
            WHERE
                pk = %(pk_XXX)s
                    AND
                xmin = %(xmin_XXX)s
            RETURNING
                pk AS pk_XXX,
                xmin AS xmin_XXX
            \"""
        ]
        # view columns that can be updated:
        _updatable_fields = [
            u'xxx',
            u'xxx'
        ]
        #--------------------------------------------------------
        def format(self):
            return u'%s' % self

    #------------------------------------------------------------
    def get_XXX(order_by=None):
        if order_by is None:
            order_by = u'true'
        else:
            order_by = u'true ORDER BY %s' % order_by

        cmd = _SQL_get_XXX % order_by
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = True)
        return [ cXxxXxx(row = {'data': r, 'idx': idx, 'pk_field': 'xxx'}) for r in rows ]

    #------------------------------------------------------------
    def create_xxx(xxx=None, xxx=None):

        args = {
            u'xxx': xxx,
            u'xxx': xxx
        }
        cmd = u\"""
            INSERT INTO xxx.xxx (
                xxx,
                xxx,
                xxx
            ) VALUES (
                %(xxx)s,
                %(xxx)s,
                gm.nullify_empty_string(%(xxx)s)
            )
            RETURNING pk
        \"""
        rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True, get_col_idx = False)

        return cXxxXxx(aPK_obj = rows[0]['pk'])

    #------------------------------------------------------------
    def delete_xxx(xxx=None):
        args = {'pk': xxx}
        cmd = u"DELETE FROM xxx.xxx WHERE pk = %(pk)s"
        gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])
        return True

    #------------------------------------------------------------

    *********** end of template ***********

    """

    def __init__(self, aPK_obj=None, row=None):
        """Init business object.

        Call from child classes:

            super(cChildClass, self).__init__(aPK_obj = aPK_obj, row = row)
        """
        self.pk_obj = '<uninitialized>'
        self._idx = {}
        self._payload = []
        self._ext_cache = {}        # cache for values of get_*() accessors read via the dict API
        self._is_modified = False

        # sanity check: child classes must define these class-scope attributes
        self.__class__._cmd_fetch_payload
        self.__class__._cmds_store_payload
        self.__class__._updatable_fields

        if aPK_obj is not None:
            self.__init_from_pk(aPK_obj=aPK_obj)
        else:
            self._init_from_row_data(row=row)

        self._is_modified = False

    def __init_from_pk(self, aPK_obj=None):
        """Creates a new clinical item instance by its PK.

        aPK_obj can be:
        - a simple value
          * the primary key WHERE condition must be
            a simple column
        - a dictionary of values
          * the primary key WHERE condition must be a
            subselect consuming the dict and producing
            the single-value primary key
        """
        self.pk_obj = aPK_obj
        result = self.refetch_payload()
        if result is True:
            self.original_payload = {}
            for field in self._idx.keys():
                self.original_payload[field] = self._payload[self._idx[field]]
            return True

        if result is False:
            raise gmExceptions.ConstructorError, "[%s:%s]: error loading instance" % (self.__class__.__name__, self.pk_obj)

    def _init_from_row_data(self, row=None):
        """Creates a new clinical item instance given its fields.

        row must be a dict with the fields:
        - pk_field: the name of the primary key field
        - idx: a dict mapping field names to positions
        - data: the field values in a list (as returned by
          cursor.fetchone() in the DB-API)

            row = {'data': row, 'idx': idx, 'pk_field': 'the PK column name'}

            rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
            objects = [ cChildClass(row = {'data': r, 'idx': idx, 'pk_field': 'the PK column name'}) for r in rows ]
        """
        try:
            self._idx = row['idx']
            self._payload = row['data']
            self.pk_obj = self._payload[self._idx[row['pk_field']]]
        except:
            _log.exception('faulty <row> argument structure: %s' % row)
            raise gmExceptions.ConstructorError, "[%s:??]: error loading instance from row data" % self.__class__.__name__

        if len(self._idx.keys()) != len(self._payload):
            _log.critical('field index vs. payload length mismatch: %s field names vs. %s fields' % (len(self._idx.keys()), len(self._payload)))
            _log.critical('faulty <row> argument structure: %s' % row)
            raise gmExceptions.ConstructorError, "[%s:??]: error loading instance from row data" % self.__class__.__name__

        self.original_payload = {}
        for field in self._idx.keys():
            self.original_payload[field] = self._payload[self._idx[field]]

    def __del__(self):
        if self.__dict__.has_key('_is_modified'):
            if self._is_modified:
                _log.critical('[%s:%s]: losing payload changes' % (self.__class__.__name__, self.pk_obj))
                _log.debug('original: %s' % self.original_payload)
                _log.debug('modified: %s' % self._payload)

    def __str__(self):
        tmp = []
        try:
            for attr in self._idx.keys():
                if self._payload[self._idx[attr]] is None:
                    tmp.append(u'%s: NULL' % attr)
                else:
                    tmp.append('%s: >>%s<<' % (attr, self._payload[self._idx[attr]]))
            return '[%s:%s]: %s' % (self.__class__.__name__, self.pk_obj, str(tmp))
        except:
            return 'nascent [%s @ %s], cannot show payload and primary key' % (self.__class__.__name__, id(self))

    def __getitem__(self, attribute):
        # 1) fields directly contained in the cached payload
        try:
            return self._payload[self._idx[attribute]]
        except KeyError:
            pass

        # 2) related values exposed via a get_<attribute>() accessor
        getter = getattr(self, 'get_%s' % attribute, None)
        if not callable(getter):
            _log.warning('[%s]: no attribute [%s]' % (self.__class__.__name__, attribute))
            _log.warning('[%s]: valid attributes: %s' % (self.__class__.__name__, str(self._idx.keys())))
            _log.warning('[%s]: no getter method [get_%s]' % (self.__class__.__name__, attribute))
            methods = filter(lambda x: x[0].startswith('get_'), inspect.getmembers(self, inspect.ismethod))
            _log.warning('[%s]: valid getter methods: %s' % (self.__class__.__name__, str(methods)))
            raise KeyError('[%s]: cannot read from key [%s]' % (self.__class__.__name__, attribute))

        self._ext_cache[attribute] = getter()
        return self._ext_cache[attribute]

    def __setitem__(self, attribute, value):
        # 1) fields contained in the payload and marked as updatable
        if attribute in self.__class__._updatable_fields:
            try:
                if self._payload[self._idx[attribute]] != value:
                    self._payload[self._idx[attribute]] = value
                    self._is_modified = True
                return
            except KeyError:
                _log.warning('[%s]: cannot set attribute <%s> despite being marked settable' % (self.__class__.__name__, attribute))
                _log.warning('[%s]: supposedly settable attributes: %s' % (self.__class__.__name__, str(self.__class__._updatable_fields)))
                raise KeyError('[%s]: cannot write to key [%s]' % (self.__class__.__name__, attribute))

        # 2) related values settable via a set_<attribute>() accessor
        if hasattr(self, 'set_%s' % attribute):
            setter = getattr(self, "set_%s" % attribute)
            if not callable(setter):
                raise AttributeError('[%s] setter [set_%s] not callable' % (self.__class__.__name__, attribute))
            try:
                del self._ext_cache[attribute]
            except KeyError:
                pass
            if type(value) is types.TupleType:
                if setter(*value):
                    self._is_modified = True
                    return
                raise AttributeError('[%s]: setter [%s] failed for [%s]' % (self.__class__.__name__, setter, value))
            if setter(value):
                self._is_modified = True
                return

        # neither an updatable field nor a successful setter
        _log.error('[%s]: cannot find attribute <%s> or setter method [set_%s]' % (self.__class__.__name__, attribute, attribute))
        _log.warning('[%s]: settable attributes: %s' % (self.__class__.__name__, str(self.__class__._updatable_fields)))
        methods = filter(lambda x: x[0].startswith('set_'), inspect.getmembers(self, inspect.ismethod))
        _log.warning('[%s]: valid setter methods: %s' % (self.__class__.__name__, str(methods)))
        raise AttributeError('[%s]: cannot set [%s]' % (self.__class__.__name__, attribute))

    def same_payload(self, another_object=None):
        raise NotImplementedError('comparison between [%s] and [%s] not implemented' % (self, another_object))

    def is_modified(self):
        return self._is_modified

    def get_fields(self):
        try:
            return self._idx.keys()
        except AttributeError:
            return 'nascent [%s @ %s], cannot return keys' % (self.__class__.__name__, id(self))

    def fields_as_dict(self, date_format='%c', none_string=u'', escape_style=None, bool_strings=None):
        """Return the payload as a dict of unicode strings."""
        if bool_strings is None:
            bools = {True: u'true', False: u'false'}
        else:
            bools = {True: bool_strings[0], False: bool_strings[1]}
        data = {}
        for field in self._idx.keys():
            val = self._payload[self._idx[field]]
            if val is None:
                data[field] = none_string
                continue
            if isinstance(val, bool):
                data[field] = bools[val]
                continue
            if isinstance(val, datetime.datetime):
                try:
                    data[field] = val.strftime(date_format).decode('utf8', 'replace')
                except ValueError:
                    data[field] = val.isoformat()
                if escape_style in [u'latex', u'tex']:
                    data[field] = tex_escape_string(data[field])
                continue
            try:
                data[field] = unicode(val, encoding = 'utf8', errors = 'replace')
            except TypeError:
                try:
                    data[field] = unicode(val)
                except (UnicodeDecodeError, TypeError):
                    val = '%s' % str(val)
                    data[field] = val.decode('utf8', 'replace')
            if escape_style in [u'latex', u'tex']:
                data[field] = tex_escape_string(data[field])

        return data

    def get_patient(self):
        _log.error('[%s:%s]: forgot to override get_patient()' % (self.__class__.__name__, self.pk_obj))
        return None

    def __noop(self):
        # placeholder for "close the connection" when the caller owns the connection
        pass

    def refetch_payload(self, ignore_changes=False):
        """Fetch field values from backend."""
        if self._is_modified:
            if ignore_changes:
                _log.critical('[%s:%s]: losing payload changes' % (self.__class__.__name__, self.pk_obj))
                _log.debug('original: %s' % self.original_payload)
                _log.debug('modified: %s' % self._payload)
            else:
                _log.critical('[%s:%s]: cannot reload, payload changed' % (self.__class__.__name__, self.pk_obj))
                return False

        if type(self.pk_obj) == types.DictType:
            arg = self.pk_obj
        else:
            arg = [self.pk_obj]
        rows, self._idx = gmPG2.run_ro_queries (
            queries = [{'cmd': self.__class__._cmd_fetch_payload, 'args': arg}],
            get_col_idx = True
        )
        if len(rows) == 0:
            _log.error('[%s:%s]: no such instance' % (self.__class__.__name__, self.pk_obj))
            return False
        self._payload = rows[0]
        return True

    def save(self, conn=None):
        # thin convenience wrapper around save_payload()
        return self.save_payload(conn = conn)

    def save_payload(self, conn=None):
        """Store updated values (if any) in database.

        Optionally accepts a pre-existing connection.
        - returns a tuple (<True|False>, <data>)
        - True: success
        - False: an error occurred
          * data is (error, message)
          * for error meanings see gmPG2.run_rw_queries()
        """
        if not self._is_modified:
            return (True, None)

        args = {}
        for field in self._idx.keys():
            args[field] = self._payload[self._idx[field]]
        self.modified_payload = args

        close_conn = self.__noop
        if conn is None:
            conn = gmPG2.get_connection(readonly=False)
            close_conn = conn.close

        # run the UPDATE statements defined by the child class
        queries = []
        for query in self.__class__._cmds_store_payload:
            queries.append({'cmd': query, 'args': args})
        rows, idx = gmPG2.run_rw_queries (
            link_obj = conn,
            queries = queries,
            return_data = True,
            get_col_idx = True
        )

        # if the UPDATE matched zero rows (concurrent change or
        # deletion) the last query returns nothing
        if len(rows) == 0:
            return (False, (u'cannot update row', _('[%s:%s]: row not updated (nothing returned), row in use ?') % (self.__class__.__name__, self.pk_obj)))

        # update cached payload fields from the values returned by the last query
        row = rows[0]
        for key in idx:
            try:
                self._payload[self._idx[key]] = row[idx[key]]
            except KeyError:
                conn.rollback()
                close_conn()
                _log.error('[%s:%s]: cannot update instance, XMIN refetch key mismatch on [%s]' % (self.__class__.__name__, self.pk_obj, key))
                _log.error('payload keys: %s' % str(self._idx))
                _log.error('XMIN refetch keys: %s' % str(idx))
                _log.error(args)
                raise

        conn.commit()
        close_conn()

        self._is_modified = False

        # refresh the reference payload
        self.original_payload = {}
        for field in self._idx.keys():
            self.original_payload[field] = self._payload[self._idx[field]]

        return (True, None)


def jsonclasshintify(obj):
    """Turn the data into a list of dicts, adding "class hints".

    All objects get turned into dictionaries which the other end
    will interpret as "object", via the __jsonclass__ hint,
    as specified by the JSON-RPC protocol standard.
    """
    if isinstance(obj, list):
        return map(jsonclasshintify, obj)
    elif isinstance(obj, gmPG2.dbapi.tz.FixedOffsetTimezone):
        res = {'__jsonclass__': ["jsonobjproxy.FixedOffsetTimezone"]}
        res['name'] = obj._name
        res['offset'] = jsonclasshintify(obj._offset)
        return res
    elif isinstance(obj, datetime.timedelta):
        res = {'__jsonclass__': ["jsonobjproxy.TimeDelta"]}
        res['days'] = obj.days
        res['seconds'] = obj.seconds
        res['microseconds'] = obj.microseconds
        return res
    elif isinstance(obj, datetime.time):
        res = {'__jsonclass__': ["jsonobjproxy.Time"]}
        res['hour'] = obj.hour
        res['minute'] = obj.minute
        res['second'] = obj.second
        res['microsecond'] = obj.microsecond
        res['tzinfo'] = jsonclasshintify(obj.tzinfo)
        return res
    elif isinstance(obj, datetime.datetime):
        res = {'__jsonclass__': ["jsonobjproxy.DateTime"]}
        res['year'] = obj.year
        res['month'] = obj.month
        res['day'] = obj.day
        res['hour'] = obj.hour
        res['minute'] = obj.minute
        res['second'] = obj.second
        res['microsecond'] = obj.microsecond
        res['tzinfo'] = jsonclasshintify(obj.tzinfo)
        return res
    elif isinstance(obj, cBusinessDBObject):
        res = {'__jsonclass__': ["jsonobjproxy.%s" % obj.__class__.__name__]}
        for k in obj.get_fields():
            t = jsonclasshintify(obj[k])
            res[k] = t
        print "props", res, dir(obj)
        for attribute in dir(obj):
            if not attribute.startswith("get_"):
                continue
            k = attribute[4:]
            if res.has_key(k):
                continue
            getter = getattr(obj, attribute, None)
            if callable(getter):
                res[k] = jsonclasshintify(getter())
        return res
    return obj
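
# Illustrative use only (the object variable is hypothetical):
#
#   hinted = jsonclasshintify(some_business_db_object)
#   # returns a dict with a '__jsonclass__' hint plus one entry
#   # per payload field and per get_*() accessor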


if __name__ == '__main__':

    if len(sys.argv) < 2:
        sys.exit()

    if sys.argv[1] != u'test':
        sys.exit()

    # minimal dummy child class, just enough to exercise row-based initialization below
    class cTestObj(cBusinessDBObject):
        _cmd_fetch_payload = None
        _cmds_store_payload = None
        _updatable_fields = []

    from Gnumed.pycommon import gmI18N
    gmI18N.activate_locale()
    gmI18N.install_domain()

    data = {
        'pk_field': 'bogus_pk',
        'idx': {'bogus_pk': 0, 'bogus_field': 1, 'bogus_date': 2},
        'data': [-1, 'bogus_data', datetime.datetime.now()]
    }
    obj = cTestObj(row=data)

    print obj.fields_as_dict()