./gnumed-server.19.6/server/0000755000175000017500000000000012272210420014014 5ustar ncqncq./gnumed-server.19.6/server/gm-move_backups_offsite.sh0000755000175000017500000000452412272210420021156 0ustar ncqncq#!/bin/bash #============================================================== # # This script can be used to move backups to another host, IOW # storing them "offsite" in the loosest sense of the word. # # # Imagine the following situation: # # 1) a laptop running client and database which is # taken to the office, to patients, etc # 2) a desktop at home with some spare storage # 3) the laptop is occasionally connected to the home # network and thus has access to the desktop machine # # One could add the following two lines to the cron # script on the laptop to make sure database backups # are replicated to the desktop whenever the laptop # has access to it: # # @reboot /usr/bin/gm-move_backups_offsite.sh # 5 0-23 * * * /usr/bin/gm-move_backups_offsite.sh # # author: Karsten Hilbert # license: GPL v2 or later #============================================================== CONF="/etc/gnumed/gnumed-backup.conf" #============================================================== # There really should not be any need to # change anything below this line. #============================================================== # load config file if [ -r ${CONF} ] ; then . ${CONF} else echo "Cannot read configuration file ${CONF}. Aborting." exit 1 fi # sanity check if [ ! 
-d "${BACKUP_DIR}" ] ; then mkdir "${BACKUP_DIR}" fi LOG="${BACKUP_DIR}/backup.log" HOST=`hostname` BACKUP_FILE_GLOB="backup-*.bz2" # do not run concurrently if test "`ps ax | grep $0 | grep -v grep | grep -v $$`" != "" ; then echo "${HOST}: "`date`": transfer already in progress, exiting" >> ${LOG} exit fi # setup rsync arguments ARGS="--quiet --archive --partial" if test -n ${MAX_OFFSITING_BANDWITH} ; then ARGS="${ARGS} --bwlimit=${MAX_OFFSITING_BANDWIDTH}" fi if test "${OFFSITE_BY_CRC}" = "yes" ; then ARGS="${ARGS} --checksum" fi echo "$HOST: "`date`": attempting backup (rsync ${ARGS}) to ${OFFSITE_BACKUP_HOST}:${OFFSITE_BACKUP_DIR}" >> $LOG if ping -c 3 -i 2 $OFFSITE_BACKUP_HOST > /dev/null; then if rsync ${ARGS} ${BACKUP_DIR}/${BACKUP_FILE_GLOB} ${OFFSITE_BACKUP_HOST}:${OFFSITE_BACKUP_DIR} ; then echo "$HOST: "`date`": success" >> $LOG else echo "$HOST: "`date`": failure: cannot transfer files" >> $LOG fi else echo "$HOST: "`date`": failure: cannot reach target host" >> $LOG fi #============================================================== ./gnumed-server.19.6/server/gm-remove_person.sh0000755000175000017500000000445712272210420017651 0ustar ncqncq#!/bin/bash #============================================================== # This script can be used to remove a person # from a GNUmed database. # # author: Karsten Hilbert # license: GPL v2 or later #============================================================== SQL_FILE="/tmp/gm-remove_person.sql" #============================================================== # There really should not be any need to # change anything below this line. #============================================================== TARGET_DB="$1" PERSON_PK="$2" # You will need to understand what this does # before exerting the power of setting it. # # You may want to start studying here: # # http://en.wikipedia.org/wiki/Database_transaction # # Use the Source, Luke. 
END_TX="$3" if test -z ${PERSON_PK} ; then echo "=============================================================" echo "usage: $0 " echo "" echo " : a GNUmed database (such as \"gnumed_vNN\")" echo " : primary key of a person in that database" echo "=============================================================" exit 1 fi if test -z ${END_TX} ; then END_TX="rollback" fi echo "" echo "Creating removal script ..." echo " ${SQL_FILE}" echo "" > $SQL_FILE ( cat <<-EOF -- GNUmed person removal script \set ON_ERROR_STOP 1 set default_transaction_read_only to off; begin; select dem.remove_person(${PERSON_PK}); ${END_TX}; EOF ) >> $SQL_FILE echo "" echo "Are you sure you want to remove the person #${PERSON_PK}" echo "*irrevocably* from the database \"${TARGET_DB}\" ?" echo "" read -e -p "Remove ? [yes / NO]: " if test "$REPLY" == "yes"; then echo "" echo "Removing person #${PERSON_PK} from database \"${TARGET_DB}\" ..." LOG="gm-remove_person.log" psql -a -U gm-dbo -d ${TARGET_DB} -f ${SQL_FILE} &> ${LOG} if test $? -ne 0 ; then echo "ERROR: failed to remove person." echo " see: ${LOG}" echo "" echo "-----------------------------------------------------" cat ${SQL_FILE} >> ${LOG} exit 1 fi if test "${END_TX}" != "commit"; then echo "" echo "This test seems fine. You should be good to go for real." echo "Learn about END_TX from the source of this script at:" echo "" echo $0 echo "" fi fi rm ${SQL_FILE} #============================================================== ./gnumed-server.19.6/server/gm-upgrade_server0000755000175000017500000000143612272210420017364 0ustar ncqncq#!/bin/sh #============================================================== # This wrapper is intended to be installed into a systemwide # admin-only executable directory such as "/usr/sbin/" # # It should be run as root and will call the upstream script # to actually upgrade an existing database. 
# # author: Andreas Tille, Karsten Hilbert # license: GPL v2 or later # #============================================================== # those probably need some adjustment from package maintainers: GM_LOG_BASE="/var/log/gnumed/server" GM_SERVER_DIR="/var/lib/gnumed/server/bootstrap" #============================================================== cd ${GM_SERVER_DIR} mkdir -p ${GM_LOG_BASE} export GM_LOG_BASE ./upgrade-db.sh $@ #============================================================== ./gnumed-server.19.6/server/__init__.py0000644000175000017500000000000112272210420016114 0ustar ncqncq ./gnumed-server.19.6/server/gm-backup_database.sh0000755000175000017500000001524412272210420020053 0ustar ncqncq#!/bin/bash #============================================================== # # This script creates an uncompressed, plain text (SQL) backup # of the database schema, data, and roles which can be used to # restore a GNUmed database from scratch with psql. # # You need to allow root to access the GNUmed database as # user "gm-dbo" by either editing pg_hba.conf or using a # .pgpass file. # # # anacron # ------- # The following line could be added to a system's # /etc/anacrontab to make sure it creates daily # database backups for GNUmed: # # 1 15 backup-gnumed- /usr/bin/gm-backup_database.sh # # # cron # ---- # add the following line to a crontab file to run a # database backup at 12:47 and 19:47 every day # # 47 12,19 * * * * /usr/bin/gm-backup_database.sh # # author: Karsten Hilbert # license: GPL v2 or later #============================================================== # Keep this properly updated to refer to the # database you want to currently backup. CONF="/etc/gnumed/gnumed-backup.conf" #============================================================== # There really should not be any need to # change anything below this line. #============================================================== # load config file if [ -r ${CONF} ] ; then . 
${CONF} else echo "Cannot read configuration file ${CONF}. Aborting." exit 1 fi # switched off ? (database name empty) if [ "$GM_DATABASE" = "" ] ; then exit 0 fi # FIXME: check PORT/DBO/BACKUP_FILENAME too # sanity check # (his does not work on Mac, so you # may need to comment this out) if ! su -c "psql -t -l -p ${GM_PORT}" -l postgres | grep -q "^[[:space:]]*${GM_DATABASE}" ; then echo "The configuration in ${CONF} is set to backup" echo "the GNUmed database ${GM_DATABASE}. This" echo "database does not exist, however. Aborting." exit 1 fi # are we backing up the latest DB ? OUR_VER=`echo ${GM_DATABASE} | cut -f 2 -d v` if test -z ${GM_HOST} ; then HAS_HIGHER_VER=`sudo -u postgres psql -A -t -d ${GM_DATABASE} -p ${GM_PORT} -c "SELECT exists (select 1 from pg_database where datname like 'gnumed_v%' and substring(datname from 9 for 3)::integer > '${OUR_VER}');"` else HAS_HIGHER_VER=`sudo -u postgres psql -A -t -h ${GM_HOST} -d ${GM_DATABASE} -p ${GM_PORT} -c "SELECT exists (select 1 from pg_database where datname like 'gnumed_v%' and substring(datname from 9 for 3)::integer > '${OUR_VER}');"` fi; if test "${HAS_HIGHER_VER}" = "t" ; then echo "Backing up database ${GM_DATABASE}." echo "" echo "However, a newer database seems to exist:" echo "" sudo -u postgres psql -l -p ${GM_PORT} | grep gnumed_v echo "" echo "Make sure you really want to backup the older database !" fi ; # generate backup file name TS=`date +%Y-%m-%d-%H-%M-%S` if test -z ${GM_HOST} ; then BACKUP_BASENAME="backup-${GM_DATABASE}-${INSTANCE_OWNER}-"`hostname` else BACKUP_BASENAME="backup-${GM_DATABASE}-${INSTANCE_OWNER}-${GM_HOST}" fi ; BACKUP_FILENAME="${BACKUP_BASENAME}-${TS}" cd ${BACKUP_DIR} if test "$?" != "0" ; then echo "Cannot change into backup directory [${BACKUP_DIR}]. Aborting." 
exit 1 fi # create dumps if test -z ${GM_HOST} ; then # locally # -r -> -g for older versions sudo -u postgres pg_dumpall -r -v -p ${GM_PORT} > ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- -----------------------------------------------------" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- Below find a list of database roles which were in use" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- in the GNUmed database \"${GM_DATABASE}\"." >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "--" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- Only those need to be restored to create a working" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- copy of your original database. All other roles can" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- be commented out by prepending '-- ' to the relevant" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- lines above." >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- -----------------------------------------------------" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null ROLES=`psql -A -t -d ${GM_DATABASE} -p ${GM_PORT} -U ${GM_DBO} -c "select gm.get_users('${GM_DATABASE}');"` echo "-- ${ROLES}" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null pg_dump -C -v -p ${GM_PORT} -U ${GM_DBO} -f ${BACKUP_FILENAME}-database.sql ${GM_DATABASE} 2> /dev/null else # remotely if ping -c 3 -i 2 ${GM_HOST} > /dev/null; then # -r -> -g for older versions pg_dumpall -r -v -h ${GM_HOST} -p ${GM_PORT} -U postgres > ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- -----------------------------------------------------" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- Below find a list of database roles which were in use" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- in the GNUmed database \"${GM_DATABASE}\"." 
>> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "--" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- Only those need to be restored to create a working" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- copy of your original database. All other roles can" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- be commented out by prepending '-- ' to the relevant" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- lines above." >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "-- -----------------------------------------------------" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null echo "" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null ROLES=`psql -A -t -d ${GM_DATABASE} -p ${GM_PORT} -U ${GM_DBO} -c "select gm.get_users('${GM_DATABASE}');"` echo "-- ${ROLES}" >> ${BACKUP_FILENAME}-roles.sql 2> /dev/null pg_dump -C -v -h ${GM_HOST} -p ${GM_PORT} -U ${GM_DBO} -f ${BACKUP_FILENAME}-database.sql ${GM_DATABASE} 2> /dev/null else echo "Cannot ping database host ${GM_HOST}." exit 1 fi ; fi ; # tar and test it if test -z ${VERIFY_TAR} ; then tar -cf ${BACKUP_FILENAME}.tar ${BACKUP_FILENAME}-database.sql ${BACKUP_FILENAME}-roles.sql else tar -cWf ${BACKUP_FILENAME}.tar ${BACKUP_FILENAME}-database.sql ${BACKUP_FILENAME}-roles.sql fi ; if test "$?" != "0" ; then echo "Creating backup tar archive [${BACKUP_FILENAME}.tar] failed. Aborting." exit 1 fi rm -f ${BACKUP_FILENAME}-database.sql rm -f ${BACKUP_FILENAME}-roles.sql chown ${BACKUP_OWNER} ${BACKUP_FILENAME}.tar exit 0 #============================================================== ./gnumed-server.19.6/server/gm-set_gm-dbo_password0000755000175000017500000000073212272210420020307 0ustar ncqncq#!/bin/sh #============================================================== # author: Karsten Hilbert # license: GPL v2 or later # # This wrapper allows setting the password for gm-dbo. # # It must be run as root. 
# #============================================================== PASSWD="$1" SQL="ALTER ROLE \\\"gm-dbo\\\" ENCRYPTED PASSWORD '${PASSWD}';" su -c "psql -d template1 -c \"${SQL}\"" -l postgres #============================================================== ./gnumed-server.19.6/server/gm-zip+sign_backups.sh0000755000175000017500000000570112272210420020225 0ustar ncqncq#!/bin/bash #============================================================== # author: Karsten Hilbert # license: GPL v2 or later # # anacron # ------- # The following line could be added to a system's # /etc/anacrontab to make sure it creates daily # database backups for GNUmed: # # 1 15 gnumed--sign-backups /usr/bin/gm-zip+sign_backups.sh # # # cron # ---- # Add the following line to a crontab file to sign # database backups at 12:47 and 19:47 every day: # # 47 12,19 * * * * /usr/bin/gm-zip+sign_backups.sh # # # It is useful to have a PROCMAIL rule for the GNotary server replies # piping them into the stoarage area where the backups are kept. #============================================================== CONF="/etc/gnumed/gnumed-backup.conf" #============================================================== # There really should not be any need to # change anything below this line. #============================================================== # load config file if [ -r ${CONF} ] ; then . ${CONF} else echo "Cannot read configuration file ${CONF}. Aborting." exit 1 fi TS=`date +%Y-%m-%d-%H-%M-%S` BACKUP_BASENAME="backup-${GM_DATABASE}-${INSTANCE_OWNER}" cd ${BACKUP_DIR} if test "$?" != "0" ; then echo "Cannot change into backup directory [${BACKUP_DIR}]. Aborting." exit 1 fi shopt -s -q nullglob # zip up any backups for BACKUP in ${BACKUP_BASENAME}-*.tar ; do # are the backup and ... TAR_OPEN=`lsof | grep ${BACKUP}` # ... the corresponding bz2 both open at the moment ? 
BZ2_OPEN=`lsof | grep ${BACKUP}.bz2` if test -z "${TAR_OPEN}" -a -z "${BZ2_OPEN}" ; then # no: remove the bz2 and start over compressing rm -f ${BACKUP}.bz2 else # yes: skip to next backup continue fi # I have tried "xz -9 -e" and it did not make much of # a difference (48 MB in a 1.2 GB backup) bzip2 -zq -${COMPRESSION_LEVEL} ${BACKUP} bzip2 -tq ${BACKUP}.bz2 # FIXME: add check for exit code chmod ${BACKUP_MASK} ${BACKUP}.bz2 chown ${BACKUP_OWNER} ${BACKUP}.bz2 # Reed-Solomon error protection support # if test -n ${ADD_ECC} ; then # rsbep # fi # GNotary support if test -n ${GNOTARY_TAN} ; then LOCAL_MAILER=`which mail` #SHA512="SHA 512:"`sha512sum -b ${BACKUP_FILENAME}.tar.bz2` SHA512=`openssl dgst -sha512 -hex ${BACKUP}.bz2` RMD160=`openssl dgst -ripemd160 -hex ${BACKUP}.bz2` export REPLYTO=${SIG_RECEIVER} # send mail ( echo " " echo "" echo "" echo " $GNOTARY_TAN" echo " notarize" echo " " echo " ${SHA512}" echo " ${RMD160}" echo " " echo "" echo " " ) | $LOCAL_MAILER -s "gnotarize" $GNOTARY_SERVER fi done exit 0 #============================================================== ./gnumed-server.19.6/server/pycommon/0000755000175000017500000000000012272210425015662 5ustar ncqncq./gnumed-server.19.6/server/pycommon/gmLog2.py0000644000175000017500000002012012272210420017351 0ustar ncqncq"""GNUmed logging framework setup. All error logging, user notification and otherwise unhandled exception handling should go through classes or functions of this module. Theory of operation: This module tailors the standard logging framework to the needs of GNUmed. By importing gmLog2 into your code you'll get the root logger send to a unicode file with messages in a format useful for debugging. The filename is either taken from the command line (--log-file=...) or derived from the name of the main application. 
The log file will be found in one of the following standard locations: 1) given on the command line as "--log-file=LOGFILE" 2) ~/./.log 3) /dir/of/binary/.log (mainly for DOS/Windows) where is derived from the name of the main application. If you want to specify just a directory for the log file you must end the --log-file definition with a slash. By importing "logging" and getting a logger your modules never need to worry about the real message destination or whether at any given time there's a valid logger available. Your MAIN module simply imports gmLog2 and all other modules will merrily and automagically start logging away. """ # TODO: # - exception() # - ascii_ctrl2mnemonic() #======================================================================== __author__ = "K. Hilbert " __license__ = "GPL v2 or later (details at http://www.gnu.org)" # stdlib import logging import sys import os import codecs import locale _logfile_name = None _logfile = None _string_encoding = None # table used for cooking non-printables AsciiName = ['<#0-0x00-nul>', '<#1-0x01-soh>', '<#2-0x02-stx>', '<#3-0x03-etx>', '<#4-0x04-eot>', '<#5-0x05-enq>', '<#6-0x06-ack>', '<#7-0x07-bel>', '<#8-0x08-bs>', '<#9-0x09-ht>', '<#10-0x0A-lf>', '<#11-0x0B-vt>', '<#12-0x0C-ff>', '<#13-0x0D-cr>', '<#14-0x0E-so>', '<#15-0x0F-si>', '<#16-0x10-dle>', '<#17-0x11-dc1/xon>', '<#18-0x12-dc2>', '<#19-0x13-dc3/xoff>', '<#20-0x14-dc4>', '<#21-0x15-nak>', '<#22-0x16-syn>', '<#23-0x17-etb>', '<#24-0x18-can>', '<#25-0x19-em>', '<#26-0x1A-sub>', '<#27-0x1B-esc>', '<#28-0x1C-fs>', '<#29-0x1D-gs>', '<#30-0x1E-rs>', '<#31-0x1F-us>' ] # msg = reduce(lambda x, y: x+y, (map(self.__char2AsciiName, list(tmp))), '') # # def __char2AsciiName(self, aChar): # try: # return AsciiName[ord(aChar)] # except IndexError: # return aChar # # def __tracestack(self): # """extract data from the current execution stack # # this is rather fragile, I guess # """ # stack = traceback.extract_stack() # self.__modulename = stack[-4][0] # 
self.__linenumber = stack[-4][1] # self.__functionname = stack[-4][2] # if (self.__functionname == "?"): # self.__functionname = "Main" #=============================================================== # external API #=============================================================== def flush(): logger = logging.getLogger('gm.logging') logger.critical(u'-------- synced log file -------------------------------') root_logger = logging.getLogger() for handler in root_logger.handlers: handler.flush() #=============================================================== def log_stack_trace(message=None): logger = logging.getLogger('gm.logging') tb = sys.exc_info()[2] if tb is None: try: tb = sys.last_traceback except AttributeError: logger.debug(u'no stack to trace') return # recurse back to root caller while 1: if not tb.tb_next: break tb = tb.tb_next # and put the frames on a stack stack_of_frames = [] frame = tb.tb_frame while frame: stack_of_frames.append(frame) frame = frame.f_back stack_of_frames.reverse() if message is not None: logger.debug(message) logger.debug(u'stack trace follows:') logger.debug(u'(locals by frame, outmost frame first)') for frame in stack_of_frames: logger.debug ( u'>>> execution frame [%s] in [%s] at line %s <<<', frame.f_code.co_name, frame.f_code.co_filename, frame.f_lineno ) for varname, value in frame.f_locals.items(): if varname == u'__doc__': continue try: value = unicode(value, encoding = _string_encoding, errors = 'replace') except TypeError: try: value = unicode(value) except (UnicodeDecodeError, TypeError): value = '%s' % str(value) value = value.decode(_string_encoding, 'replace') logger.debug(u'%20s = %s', varname, value) #=============================================================== def set_string_encoding(encoding=None): logger = logging.getLogger('gm.logging') global _string_encoding if encoding is not None: codecs.lookup(encoding) _string_encoding = encoding logger.info(u'setting python.str -> python.unicode encoding to <%s> 
(explicit)', _string_encoding) return True enc = sys.getdefaultencoding() if enc != 'ascii': _string_encoding = enc logger.info(u'setting python.str -> python.unicode encoding to <%s> (sys.getdefaultencoding)', _string_encoding) return True enc = locale.getlocale()[1] if enc is not None: _string_encoding = enc logger.info(u'setting python.str -> python.unicode encoding to <%s> (locale.getlocale)', _string_encoding) return True # FIXME: or rather use utf8 ? _string_encoding = locale.getpreferredencoding(do_setlocale=False) logger.info(u'setting python.str -> python.unicode encoding to <%s> (locale.getpreferredencoding)', _string_encoding) return True #=============================================================== # internal API #=============================================================== def __setup_logging(): set_string_encoding() global _logfile if _logfile is not None: return True if not __get_logfile_name(): return False if sys.version[:3] < '2.5': fmt = u'%(asctime)s %(levelname)-8s %(name)s (%(pathname)s @ #%(lineno)d): %(message)s' else: fmt = u'%(asctime)s %(levelname)-8s %(name)s (%(pathname)s::%(funcName)s() #%(lineno)d): %(message)s' _logfile = codecs.open(filename = _logfile_name, mode = 'wb', encoding = 'utf8', errors = 'replace') logging.basicConfig ( format = fmt, datefmt = '%Y-%m-%d %H:%M:%S', level = logging.DEBUG, stream = _logfile ) logger = logging.getLogger('gm.logging') logger.critical(u'-------- start of logging ------------------------------') logger.info(u'log file is <%s>', _logfile_name) logger.info(u'log level is [%s]', logging.getLevelName(logger.getEffectiveLevel())) logger.info(u'log file encoding is ') logger.info(u'initial python.str -> python.unicode encoding is <%s>', _string_encoding) #--------------------------------------------------------------- def __get_logfile_name(): global _logfile_name if _logfile_name is not None: return _logfile_name def_log_basename = os.path.splitext(os.path.basename(sys.argv[0]))[0] def_log_name 
= '%s-%s.log' % (def_log_basename, os.getpid()) # given on command line ? for option in sys.argv[1:]: if option.startswith('--log-file='): (name,value) = option.split('=') (dir, name) = os.path.split(value) if dir == '': dir = '.' if name == '': name = def_log_name _logfile_name = os.path.abspath(os.path.expanduser(os.path.join(dir, name))) return True # else store it in ~/.def_log_basename/def_log_name dir = os.path.expanduser(os.path.join('~', '.' + def_log_basename)) try: os.makedirs(dir) except OSError, e: if (e.errno == 17) and not os.path.isdir(dir): raise _logfile_name = os.path.join(dir, def_log_name) return True #=============================================================== # main #--------------------------------------------------------------- __setup_logging() if __name__ == '__main__': #----------------------------------------------------------- def test(): logger = logging.getLogger('gmLog2.test') logger.error("I expected to see %s::test()" % __file__) try: int(None) except: logger.exception(u'unhandled exception') log_stack_trace() flush() #----------------------------------------------------------- if len(sys.argv) > 1 and sys.argv[1] == u'test': test() #=============================================================== ./gnumed-server.19.6/server/pycommon/gmPsql.py0000644000175000017500000001757212272210420017506 0ustar ncqncq# A Python class to replace the PSQL command-line interpreter # NOTE: this is not a full replacement for the interpeter, merely # enough functionality to run gnumed installation scripts # # Copyright (C) 2003, 2004 - 2010 GNUmed developers # Licence: GPL v2 or later #=================================================================== __author__ = "Ian Haywood" __license__ = "GPL v2 or later (details at http://www.gnu.org)" # stdlib import sys, os, string, re, urllib2, logging _log = logging.getLogger('gm.bootstrapper') unformattable_error_id = 12345 #=================================================================== def 
shellrun (cmd): """ runs the shell command and returns a string """ stdin, stdout = os.popen4 (cmd.group (1)) r = stdout.read () stdout.close() stdin.close() return r #------------------------------------------------------------------- def shell(str): """ performs backtick shell extension in a string """ return re.sub (r"`(.*)`", shellrun, str) #=================================================================== class Psql: def __init__ (self, conn): """ db : the interpreter to connect to, must be a DBAPI compliant interface """ self.conn = conn self.vars = {'ON_ERROR_STOP':None} #--------------------------------------------------------------- def match (self, str): match = re.match (str, self.line) if match is None: ret = 0 else: ret = 1 self.groups = match.groups () return ret #--------------------------------------------------------------- def fmt_msg(self, aMsg): try: tmp = u"%s:%d: %s" % (self.filename, self.lineno-1, aMsg) tmp = tmp.replace(u'\r', u'') tmp = tmp.replace(u'\n', u'') except UnicodeDecodeError: global unformattable_error_id tmp = u"%s:%d: " % (self.filename, self.lineno-1, unformattable_error_id) try: print 'ERROR: GNUmed bootstrap #%d:' % unformattable_error_id print aMsg except: pass unformattable_error_id += 1 return tmp #--------------------------------------------------------------- def run (self, filename): """ filename: a file, containg semicolon-separated SQL commands """ if re.match ("http://.*", filename) or re.match ("ftp://.*", filename) or re.match ("gopher://.*", filename): try: self.file = urllib2.urlopen (filename) except URLError: _log.error(u"cannot access %s" % filename) return 1 else: if os.access (filename, os.R_OK): self.file = open(filename) else: _log.error(u"cannot open file [%s]" % filename) return 1 self.lineno = 0 self.filename = filename in_string = False bracketlevel = 0 curr_cmd = '' curs = self.conn.cursor () # transaction_started = False for self.line in self.file.readlines(): self.lineno += 1 if 
len(self.line.strip()) == 0: continue # \echo if self.match (r"^\\echo (.*)"): _log.info(self.fmt_msg(shell(self.groups[0]))) continue # \qecho if self.match (r"^\\qecho (.*)"): _log.info(self.fmt_msg(shell (self.groups[0]))) continue # \q if self.match (r"^\\q"): _log.warning(self.fmt_msg(u"script terminated by \\q")) return 0 # \set if self.match (r"^\\set (\S+) (\S+)"): self.vars[self.groups[0]] = shell (self.groups[1]) if self.groups[0] == 'ON_ERROR_STOP': self.vars['ON_ERROR_STOP'] = int (self.vars['ON_ERROR_STOP']) continue # \unset if self.match (r"^\\unset (\S+)"): self.vars[self.groups[0]] = None continue # \connect if self.match (r"^\\connect.*"): _log.error(self.fmt_msg(u"\\connect not yet supported in scripts")) continue # \lo_import if self.match (r"^\\lo_import.*"): _log.error(self.fmt_msg(u"\\lo_import not yet supported")) # no sense to continue here return 1 # \copy ... to ... if self.match (r"^\\copy .* to '(\S+)' .*"): _log.error(self.fmt_msg(u"\\copy to not implemented")) return 1 # \copy ... from ... 
if self.match (r"^\\copy .* from '(\S+)' .*"): copyfile = self.groups[0] try: copyfd = file (os.path.join (os.path.dirname (self.filename), copyfile)) except error: _log.error(self.fmt_msg(error)) return 1 self.line = self.line[1:].strip() # lop off leading slash self.line.replace ("'%s'" % copyfile, 'stdin') # now we have a command that the backend understands copyline = 0 try: curs = self.conn.cursor () # send the COPY command curs.execute (self.line) # send the data for i in copyfd.readlines (): curs.execute (i) copyline += 1 self.conn.commit () curs.close () except StandardError, error: _log.error(u"%s: %d: %s" % (copyfile, copyline, error)) if self.vars['ON_ERROR_STOP']: return 1 continue # \i if self.match (r"^\\i (\S+)"): # create another interpreter instance in same connection Psql(self.conn).run (os.path.join (os.path.dirname (self.filename), self.groups[0])) continue # \encoding if self.match (r"^\\encoding.*"): _log.error(self.fmt_msg(u"\\encoding not yet supported")) continue # other '\' commands if self.match (r"^\\(.*)") and not in_string: # most other \ commands are for controlling output formats, don't make # much sense in an installation script, so we gently ignore them _log.warning(self.fmt_msg(u"psql command \"\\%s\" being ignored " % self.groups[0])) continue # non-'\' commands this_char = self.line[0] # loop over characters in line for next_char in self.line[1:] + ' ': # start/end of string detected if this_char == "'": in_string = not in_string # detect -- style comments if this_char == '-' and next_char == '-' and not in_string: break # detect bracketing if this_char == '(' and not in_string: bracketlevel += 1 if this_char == ')' and not in_string: bracketlevel -= 1 # found end of command, not inside string, not inside bracket ? 
if not (not in_string and (bracketlevel == 0) and (this_char == ';')): curr_cmd += this_char else: try: # if curr_cmd.strip ().upper () == 'COMMIT': # if transaction_started: # self.conn.commit () # curs.close () # curs = self.conn.cursor () # _log.debug(self.fmt_msg ("transaction committed")) # else: # _log.warning(self.fmt_msg ("COMMIT without BEGIN: no actual transaction happened!")) # transaction_started = False # elif curr_cmd.strip ().upper () == 'BEGIN': # if transaction_started: # _log.warning(self.fmt_msg ("BEGIN inside transaction")) # else: # transaction_started = True # _log.debug(self.fmt_msg ("starting transaction")) # else: if curr_cmd.strip() != '': if curr_cmd.find('vacuum'): self.conn.commit(); curs.close() old_iso_level = self.conn.isolation_level self.conn.set_isolation_level(0) curs = self.conn.cursor() curs.execute (curr_cmd) self.conn.set_isolation_level(old_iso_level) else: curs.execute (curr_cmd) # if not transaction_started: except StandardError, error: _log.debug(curr_cmd) if re.match (r"^NOTICE:.*", str(error)): _log.warning(self.fmt_msg(error)) else: if self.vars['ON_ERROR_STOP']: _log.error(self.fmt_msg(error)) return 1 else: _log.debug(self.fmt_msg(error)) self.conn.commit() curs.close() curs = self.conn.cursor() curr_cmd = '' this_char = next_char # end of loop over chars # end of loop over lines self.conn.commit() curs.close() return 0 #=================================================================== # testing code if __name__ == '__main__': from pyPgSQL import PgSQL conn = PgSQL.connect (user='gm-dbo', database = 'gnumed') psql = Psql (conn) psql.run (sys.argv[1]) conn.close () #=================================================================== ./gnumed-server.19.6/server/pycommon/gmBorg.py0000644000175000017500000000422312272210420017445 0ustar ncqncq#=================================================== # Thanks to Python Patterns ! 
# --------------------------- # $Id: gmBorg.py,v 1.7 2009-05-08 07:58:35 ncq Exp $ __version__ = "$Revision: 1.7 $" __author__ = "Karsten.Hilbert@gmx.net" __license__ = "GPL" #=================================================== class cBorg(object): """A generic Borg mixin for new-style classes. - mixin this class with your class' ancestors to borg it - there may be many instances of this - PER CHILD CLASS - but they all share state """ _instances = {} def __new__(cls, *args, **kargs): # look up subclass instance cache if cBorg._instances.get(cls) is None: #cBorg._instances[cls] = object.__new__(cls, *args, **kargs) cBorg._instances[cls] = object.__new__(cls) return cBorg._instances[cls] #=================================================== if __name__ == '__main__': class A(cBorg): pass class B(cBorg): pass class C(cBorg): def __init__(self, val='default'): self.x = val print "testing new-style classes borg" a1 = A() a2 = A() a1.a = 5 print a1.a, "==", a2.a a3 = A() print a1.a, "==", a2.a, "==", a3.a b1 = B() b1.a = 10 print b1.a print a1.a b2 = B() print b2.a c1 = C(val = 'non-default') print c1.x c2 = C(val = 'non-default 2') print c2.x c3 = C() print c3.x #=================================================== # $Log: gmBorg.py,v $ # Revision 1.7 2009-05-08 07:58:35 ncq # - __new__ doesn't take args anymore # # Revision 1.6 2008/05/21 13:57:57 ncq # - remove old borg # # Revision 1.5 2007/10/23 21:23:30 ncq # - cleanup # # Revision 1.4 2007/09/24 22:05:23 ncq # - improved docs # # Revision 1.3 2007/05/11 14:14:59 ncq # - make borg per-sublcass # # Revision 1.2 2007/05/07 12:30:05 ncq # - make cBorg an object child so properties work on it # # Revision 1.1 2004/02/25 09:30:13 ncq # - moved here from python-common # # Revision 1.3 2003/12/29 16:21:51 uid66147 # - spelling fix # # Revision 1.2 2003/11/17 10:56:35 sjtan # # synced and commiting. # # Revision 1.1 2003/10/23 06:02:38 sjtan # # manual edit areas modelled after r.terry's specs. 
# # Revision 1.1 2003/04/02 16:07:55 ncq # - first version # ./gnumed-server.19.6/server/pycommon/gmNull.py0000644000175000017500000001173712272210420017476 0ustar ncqncq"""null.py This is a sample implementation of the 'Null Object' design pattern. Roughly, the goal with Null objects is to provide an 'intelligent' replacement for the often used primitive data type None in Python or Null (or Null pointers) in other languages. These are used for many purposes including the important case where one member of some group of otherwise similar elements is special for whatever reason. Most often this results in conditional statements to distinguish between ordinary elements and the primitive Null value. Among the advantages of using Null objects are the following: - Superfluous conditional statements can be avoided by providing a first class object alternative for the primitive value None. - Code readability is improved. - Null objects can act as a placeholder for objects with behaviour that is not yet implemented. - Null objects can be replaced for any other class. - Null objects are very predictable at what they do. To cope with the disadvantage of creating large numbers of passive objects that do nothing but occupy memory space Null objects are often combined with the Singleton pattern. For more information use any internet search engine and look for combinations of these words: Null, object, design and pattern. Dinu C. Gherman, August 2001 For modifications see CVS changelog below. Karsten Hilbert July 2004 """ #============================================================== # $Source: /home/ncq/Projekte/cvs2git/vcs-mirror/gnumed/gnumed/client/pycommon/gmNull.py,v $ __version__ = "$Revision: 1.6 $" __author__ = "Dinu C. Gherman" __license__ = "GPL v2 or later (details at http://www.gnu.org)" #============================================================== class cNull: """A class for implementing Null objects. 
	This class ignores all parameters passed when constructing or
	calling instances and traps all attribute and method requests.
	Instances of it always (and reliably) do 'nothing'.

	The code might benefit from implementing some further special
	Python methods depending on the context in which its instances
	are used. Especially when comparing and coercing Null objects
	the respective methods' implementation will depend very much
	on the environment and, hence, these special methods are not
	provided here.
	"""

	# class-wide debug flag: when true, every trapped operation
	# prints a notice (set via the "warn" keyword to __init__)
	_warn = 0

	# object constructing
	def __init__(self, *args, **kwargs):
		"Ignore parameters."
		# only the "warn" keyword is honoured, all other
		# arguments are silently discarded
		try:
			cNull._warn = kwargs['warn']
		except KeyError:
			pass
		return None

	# object calling
	def __call__(self, *args, **kwargs):
		"Ignore method calls."
		# calling a Null yields the same Null, so call chains collapse
		if cNull._warn:
			print "cNull.__call__()"
		return self

	# attribute handling
	def __getattr__(self, mname):
		"Ignore attribute requests."
		# attribute access yields the same Null, so access chains collapse
		if cNull._warn:
			print "cNull.__getattr__()"
		return self

	def __setattr__(self, name, value):
		"Ignore attribute setting."
		if cNull._warn:
			print "cNull.__setattr__()"
		return self

	def __delattr__(self, name):
		"Ignore deleting attributes."
		if cNull._warn:
			print "cNull.__delattr__()"
		return self

	# misc.
	def __repr__(self):
		"Return a string representation."
		if cNull._warn:
			print "cNull.__repr__()"
		# NOTE(review): format string looks garbled ("" % id(self)) --
		# upstream probably contained angle-bracket text such as
		# '<cNull instance at %s>' which the archive extraction ate;
		# verify against the original gmNull.py before changing
		return "" % id(self)

	def __str__(self):
		"Convert to a string and return it."
		if cNull._warn:
			print "cNull.__str__()"
		return "cNull instance"

	def __nonzero__(self):
		# a Null object is always "false" in boolean context
		if cNull._warn:
			print "cNull.__nonzero__()"
		return 0

	def __len__(self):
		# a Null object is always "empty"
		if cNull._warn:
			print "cNull.__len__()"
		return 0

#==============================================================
def test():
	"Perform some decent tests, or rather: demos."
# constructing and calling n = cNull() n = cNull('value') n = cNull('value', param='value', warn=1) n() n('value') n('value', param='value') # attribute handling n.attr1 n.attr1.attr2 n.method1() n.method1().method2() n.method('value') n.method(param='value') n.method('value', param='value') n.attr1.method1() n.method1().attr1 n.attr1 = 'value' n.attr1.attr2 = 'value' del n.attr1 del n.attr1.attr2.attr3 # representation and conversion to a string tmp = '' % id(n) assert repr(n) == tmp assert str(n) == 'cNull instance' # comparing if n == 1: print "Null object == 1" else: print "Null object != 1" #-------------------------------------------------------------- if __name__ == '__main__': test() #============================================================== # $Log: gmNull.py,v $ # Revision 1.6 2005-06-28 14:12:55 cfmoro # Integration in space fixes # # Revision 1.5 2004/12/22 08:40:01 ncq # - make output more obvious # # Revision 1.4 2004/11/24 15:49:11 ncq # - use 0/1 not False/True so we can run on older pythons # # Revision 1.3 2004/08/20 08:38:47 ncq # - robustify while working on allowing inactive patient after search # # Revision 1.2 2004/07/21 07:51:47 ncq # - tabified # - __nonzero__ added # - if keyword argument 'warn' is True: warn on use of Null class # # Revision 1.1 2004/07/06 00:08:31 ncq # - null design pattern from python cookbook # ./gnumed-server.19.6/server/pycommon/gmBusinessDBObject.py0000644000175000017500000006404212272210420021711 0ustar ncqncq"""GNUmed database object business class. Overview -------- This class wraps a source relation (table, view) which represents an entity that makes immediate business sense such as a vaccination or a medical document. In many if not most cases this source relation is a denormalizing view. The data in that view will in most cases, however, originate from several normalized tables. One instance of this class represents one row of said source relation. 
Note, however, that this class does not *always* simply
wrap a single table or view. It can also encompass several
relations (views, tables, sequences etc) that taken together
form an object meaningful to *business* logic.

Initialization
--------------

There are two ways to initialize an instance with values.
One way is to pass a "primary key equivalent" object into
__init__(). Refetch_payload() will then pull the data from
the backend. Another way would be to fetch the data outside
the instance and pass it in via the row argument. In that
case the instance will not initially connect to the database
which may offer a great boost to performance.

Values API
----------

Field values are cached for later access. They can be
accessed by a dictionary API, eg:

	old_value = object['field']
	object['field'] = new_value

The field names correspond to the respective column names
in the "main" source relation. Accessing non-existent field
names will raise an error, as does trying to set fields not
listed in self.__class__._updatable_fields. To actually
store updated values in the database one must explicitly
call save_payload().

The class will in many cases be enhanced by accessors to
related data that is not directly part of the business
object itself but is closely related, such as codes linked
to a clinical narrative entry (eg a diagnosis). Such
accessors in most cases start with get_*. Related setters
start with set_*. The values can be accessed via the
object['field'] syntax, too, but they will be cached
independently.

Concurrency handling
--------------------

GNUmed connections always run transactions in isolation
level "serializable". This prevents transactions happening
at the *very same time* from overwriting each other's data.
All but one of them will abort with a concurrency error (eg
if a transaction runs a select-for-update later than another
one it will hang until the first transaction ends. Then it
will succeed or fail depending on what the first transaction
did).
This is standard transactional behaviour. However, another transaction may have updated our row between the time we first fetched the data and the time we start the update transaction. This is noticed by getting the XMIN system column for the row when initially fetching the data and using that value as a where condition value when updating the row later. If the row had been updated (xmin changed) or deleted (primary key disappeared) in the meantime the update will touch zero rows (as no row with both PK and XMIN matching is found) even if the query itself syntactically succeeds. When detecting a change in a row due to XMIN being different one needs to be careful how to represent that to the user. The row may simply have changed but it also might have been deleted and a completely new and unrelated row which happens to have the same primary key might have been created ! This row might relate to a totally different context (eg. patient, episode, encounter). One can offer all the data to the user: self.original_payload - contains the data at the last successful refetch self.modified_payload - contains the modified payload just before the last failure of save_payload() - IOW what is currently in the database self._payload - contains the currently active payload which may or may not contain changes For discussion on this see the thread starting at: http://archives.postgresql.org/pgsql-general/2004-10/msg01352.php and here http://groups.google.com/group/pgsql.general/browse_thread/thread/e3566ba76173d0bf/6cf3c243a86d9233 (google for "XMIN semantic at peril") Problem cases with XMIN: 1) not unlikely - a very old row is read with XMIN - vacuum comes along and sets XMIN to FrozenTransactionId - now XMIN changed but the row actually didn't ! - an update with "... where xmin = old_xmin ..." 
fails although there is no need to fail 2) quite unlikely - a row is read with XMIN - a long time passes - the original XMIN gets frozen to FrozenTransactionId - another writer comes along and changes the row - incidentally the exact same old row gets the old XMIN *again* - now XMIN is (again) the same but the data changed ! - a later update fails to detect the concurrent change !! TODO: The solution is to use our own column for optimistic locking which gets updated by an AFTER UPDATE trigger. """ #============================================================ __author__ = "K.Hilbert " __license__ = "GPL v2 or later" import sys import types import inspect import logging import datetime if __name__ == '__main__': sys.path.insert(0, '../../') from Gnumed.pycommon import gmExceptions from Gnumed.pycommon import gmPG2 from Gnumed.pycommon.gmDateTime import pydt_strftime from Gnumed.pycommon.gmTools import tex_escape_string, xetex_escape_string _log = logging.getLogger('gm.db') #============================================================ class cBusinessDBObject(object): """Represents business objects in the database. Rules: - instances ARE ASSUMED TO EXIST in the database - PK construction (aPK_obj): DOES verify its existence on instantiation (fetching data fails) - Row construction (row): allowed by using a dict of pairs field name: field value (PERFORMANCE improvement) - does NOT verify FK target existence - does NOT create new entries in the database - does NOT lazy-fetch fields on access Class scope SQL commands and variables: <_cmd_fetch_payload> - must return exactly one row - where clause argument values are expected in self.pk_obj (taken from __init__(aPK_obj)) - must return xmin of all rows that _cmds_store_payload will be updating, so views must support the xmin columns of their underlying tables <_cmds_store_payload> - one or multiple "update ... set ... where xmin_* = ... and pk* = ..." 
statements which actually update the database from the data in self._payload, - the last query must refetch at least the XMIN values needed to detect concurrent updates, their field names had better be the same as in _cmd_fetch_payload, - the last query CAN return other fields which is particularly useful when those other fields are computed in the backend and may thus change upon save but will not have been set by the client code explicitely - this is only really of concern if the saved subclass is to be reused after saving rather than re-instantiated - when subclasses tend to live a while after save_payload() was called and they support computed fields (say, _(some_column) you need to return *all* columns (see cEncounter) <_updatable_fields> - a list of fields available for update via object['field'] A template for new child classes: *********** start of template *********** #------------------------------------------------------------ from Gnumed.pycommon import gmBusinessDBObject from Gnumed.pycommon import gmPG2 #============================================================ # short description #------------------------------------------------------------ # search/replace "" " -> 3 "s # # use plural form, search-replace get_XXX _SQL_get_XXX = u"" " SELECT *, (xmin AS xmin_XXX) FROM XXX.v_XXX WHERE %s "" " class cXxxXxx(gmBusinessDBObject.cBusinessDBObject): "" "Represents ..."" " _cmd_fetch_payload = _SQL_get_XXX % u"pk_XXX = %s" _cmds_store_payload = [ u"" " -- typically the underlying table name UPDATE xxx.xxx SET -- typically "table_col = %(view_col)s" xxx = %(xxx)s, xxx = gm.nullify_empty_string(%(xxx)s) WHERE pk = %(pk_XXX)s AND xmin = %(xmin_XXX)s RETURNING xmin as xmin_XXX --, ... --, ... 
"" " ] # view columns that can be updated: _updatable_fields = [ u'xxx', u'xxx' ] #-------------------------------------------------------- # def format(self): # return u'%s' % self #------------------------------------------------------------ def get_XXX(order_by=None): if order_by is None: order_by = u'true' else: order_by = u'true ORDER BY %s' % order_by cmd = _SQL_get_XXX % order_by rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = True) return [ cXxxXxx(row = {'data': r, 'idx': idx, 'pk_field': 'pk_xxx'}) for r in rows ] #------------------------------------------------------------ def create_xxx(xxx=None, xxx=None): args = { u'xxx': xxx, u'xxx': xxx } cmd = u"" " INSERT INTO xxx.xxx ( xxx, xxx, xxx ) VALUES ( %(xxx)s, %(xxx)s, gm.nullify_empty_string(%(xxx)s) ) RETURNING pk --RETURNING * "" " rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True, get_col_idx = False) #rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True, get_col_idx = True) return cXxxXxx(aPK_obj = rows[0]['pk']) #return cXxxXxx(row = {'data': r, 'idx': idx, 'pk_field': 'pk_XXX'}) #------------------------------------------------------------ def delete_xxx(pk_xxx=None): args = {'pk': pk_xxx} cmd = u"DELETE FROM xxx.xxx WHERE pk = %(pk)s" gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}]) return True #------------------------------------------------------------ *********** end of template *********** """ #-------------------------------------------------------- def __init__(self, aPK_obj=None, row=None): """Init business object. 
Call from child classes: super(cChildClass, self).__init__(aPK_obj = aPK_obj, row = row) """ # initialize those "too early" because checking descendants might # fail which will then call __str__ in stack trace logging if --debug # was given which in turn needs those instance variables self.pk_obj = '' self._idx = {} self._payload = [] # the cache for backend object values (mainly table fields) self._ext_cache = {} # the cache for extended method's results self._is_modified = False # check descendants self.__class__._cmd_fetch_payload self.__class__._cmds_store_payload self.__class__._updatable_fields if aPK_obj is not None: self.__init_from_pk(aPK_obj=aPK_obj) else: self._init_from_row_data(row=row) self._is_modified = False #-------------------------------------------------------- def __init_from_pk(self, aPK_obj=None): """Creates a new clinical item instance by its PK. aPK_obj can be: - a simple value * the primary key WHERE condition must be a simple column - a dictionary of values * the primary key where condition must be a subselect consuming the dict and producing the single-value primary key """ self.pk_obj = aPK_obj result = self.refetch_payload() if result is True: self.original_payload = {} for field in self._idx.keys(): self.original_payload[field] = self._payload[self._idx[field]] return True if result is False: raise gmExceptions.ConstructorError, "[%s:%s]: error loading instance" % (self.__class__.__name__, self.pk_obj) #-------------------------------------------------------- def _init_from_row_data(self, row=None): """Creates a new clinical item instance given its fields. 
row must be a dict with the fields: - pk_field: the name of the primary key field - idx: a dict mapping field names to position - data: the field values in a list (as returned by cursor.fetchone() in the DB-API) row = {'data': row, 'idx': idx, 'pk_field': 'the PK column name'} rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True) objects = [ cChildClass(row = {'data': r, 'idx': idx, 'pk_field': 'the PK column name'}) for r in rows ] """ try: self._idx = row['idx'] self._payload = row['data'] self.pk_obj = self._payload[self._idx[row['pk_field']]] except: _log.exception('faulty argument structure: %s' % row) raise gmExceptions.ConstructorError, "[%s:??]: error loading instance from row data" % self.__class__.__name__ if len(self._idx.keys()) != len(self._payload): _log.critical('field index vs. payload length mismatch: %s field names vs. %s fields' % (len(self._idx.keys()), len(self._payload))) _log.critical('faulty argument structure: %s' % row) raise gmExceptions.ConstructorError, "[%s:??]: error loading instance from row data" % self.__class__.__name__ self.original_payload = {} for field in self._idx.keys(): self.original_payload[field] = self._payload[self._idx[field]] #-------------------------------------------------------- def __del__(self): if self.__dict__.has_key('_is_modified'): if self._is_modified: _log.critical('[%s:%s]: loosing payload changes' % (self.__class__.__name__, self.pk_obj)) _log.debug('original: %s' % self.original_payload) _log.debug('modified: %s' % self._payload) #-------------------------------------------------------- def __str__(self): tmp = [] try: for attr in self._idx.keys(): if self._payload[self._idx[attr]] is None: tmp.append('%s: NULL' % attr) else: tmp.append('%s: >>%s<<' % (attr, self._payload[self._idx[attr]])) return '[%s:%s]: %s' % (self.__class__.__name__, self.pk_obj, str(tmp)) #return '[%s:%s]:\n %s' % (self.__class__.__name__, self.pk_obj, '\n '.join(lines)) except: return 
'nascent [%s @ %s], cannot show payload and primary key' %(self.__class__.__name__, id(self)) #-------------------------------------------------------- def __unicode__(self): lines = [] try: for attr in self._idx.keys(): if self._payload[self._idx[attr]] is None: lines.append(u'%s: NULL' % attr) else: lines.append('%s: %s' % (attr, self._payload[self._idx[attr]])) return '[%s:%s]:\n%s' % (self.__class__.__name__, self.pk_obj, u'\n'.join(lines)) except: return 'nascent [%s @ %s], cannot show payload and primary key' %(self.__class__.__name__, id(self)) #-------------------------------------------------------- def __getitem__(self, attribute): # use try: except: as it is faster and we want this as fast as possible # 1) backend payload cache try: return self._payload[self._idx[attribute]] except KeyError: pass # 2) extension method results ... getter = getattr(self, 'get_%s' % attribute, None) if not callable(getter): _log.warning('[%s]: no attribute [%s]' % (self.__class__.__name__, attribute)) _log.warning('[%s]: valid attributes: %s' % (self.__class__.__name__, str(self._idx.keys()))) _log.warning('[%s]: no getter method [get_%s]' % (self.__class__.__name__, attribute)) methods = filter(lambda x: x[0].startswith('get_'), inspect.getmembers(self, inspect.ismethod)) _log.warning('[%s]: valid getter methods: %s' % (self.__class__.__name__, str(methods))) raise KeyError('[%s]: cannot read from key [%s]' % (self.__class__.__name__, attribute)) self._ext_cache[attribute] = getter() return self._ext_cache[attribute] #-------------------------------------------------------- def __setitem__(self, attribute, value): # 1) backend payload cache if attribute in self.__class__._updatable_fields: try: if self._payload[self._idx[attribute]] != value: self._payload[self._idx[attribute]] = value self._is_modified = True return except KeyError: _log.warning('[%s]: cannot set attribute <%s> despite marked settable' % (self.__class__.__name__, attribute)) _log.warning('[%s]: supposedly 
settable attributes: %s' % (self.__class__.__name__, str(self.__class__._updatable_fields))) raise KeyError('[%s]: cannot write to key [%s]' % (self.__class__.__name__, attribute)) # 2) setters providing extensions if hasattr(self, 'set_%s' % attribute): setter = getattr(self, "set_%s" % attribute) if not callable(setter): raise AttributeError('[%s] setter [set_%s] not callable' % (self.__class__.__name__, attribute)) try: del self._ext_cache[attribute] except KeyError: pass if type(value) is types.TupleType: if setter(*value): self._is_modified = True return raise AttributeError('[%s]: setter [%s] failed for [%s]' % (self.__class__.__name__, setter, value)) if setter(value): self._is_modified = True return # 3) don't know what to do with _log.error('[%s]: cannot find attribute <%s> or setter method [set_%s]' % (self.__class__.__name__, attribute, attribute)) _log.warning('[%s]: settable attributes: %s' % (self.__class__.__name__, str(self.__class__._updatable_fields))) methods = filter(lambda x: x[0].startswith('set_'), inspect.getmembers(self, inspect.ismethod)) _log.warning('[%s]: valid setter methods: %s' % (self.__class__.__name__, str(methods))) raise AttributeError('[%s]: cannot set [%s]' % (self.__class__.__name__, attribute)) #-------------------------------------------------------- # external API #-------------------------------------------------------- def same_payload(self, another_object=None): raise NotImplementedError('comparison between [%s] and [%s] not implemented' % (self, another_object)) #-------------------------------------------------------- def is_modified(self): return self._is_modified #-------------------------------------------------------- def get_fields(self): try: return self._idx.keys() except AttributeError: return 'nascent [%s @ %s], cannot return keys' %(self.__class__.__name__, id(self)) #-------------------------------------------------------- def get_updatable_fields(self): return self.__class__._updatable_fields 
#-------------------------------------------------------- def fields_as_dict(self, date_format='%Y %b %d %H:%M', none_string=u'', escape_style=None, bool_strings=None): if bool_strings is None: bools = {True: u'true', False: u'false'} else: bools = {True: bool_strings[0], False: bool_strings[1]} data = {} for field in self._idx.keys(): # FIXME: harden against BYTEA fields #if type(self._payload[self._idx[field]]) == ... # data[field] = _('<%s bytes of binary data>') % len(self._payload[self._idx[field]]) # continue val = self._payload[self._idx[field]] if val is None: data[field] = none_string continue if isinstance(val, bool): data[field] = bools[val] continue if isinstance(val, datetime.datetime): data[field] = pydt_strftime(val, format = date_format, encoding = 'utf8') if escape_style in [u'latex', u'tex']: data[field] = tex_escape_string(data[field]) elif escape_style in [u'xetex', u'xelatex']: data[field] = xetex_escape_string(data[field]) continue try: data[field] = unicode(val, encoding = 'utf8', errors = 'replace') except TypeError: try: data[field] = unicode(val) except (UnicodeDecodeError, TypeError): val = '%s' % str(val) data[field] = val.decode('utf8', 'replace') if escape_style in [u'latex', u'tex']: data[field] = tex_escape_string(data[field]) elif escape_style in [u'xetex', u'xelatex']: data[field] = xetex_escape_string(data[field]) return data #-------------------------------------------------------- def get_patient(self): _log.error('[%s:%s]: forgot to override get_patient()' % (self.__class__.__name__, self.pk_obj)) return None #-------------------------------------------------------- def format(self): return u'%s' % self #-------------------------------------------------------- def refetch_payload(self, ignore_changes=False): """Fetch field values from backend. 
		"""
		# refuse to clobber unsaved changes unless explicitly told to
		if self._is_modified:
			if ignore_changes:
				_log.critical('[%s:%s]: loosing payload changes' % (self.__class__.__name__, self.pk_obj))
				_log.debug('original: %s' % self.original_payload)
				_log.debug('modified: %s' % self._payload)
			else:
				_log.critical('[%s:%s]: cannot reload, payload changed' % (self.__class__.__name__, self.pk_obj))
				return False
		# dict-style PKs are passed as-is, scalar PKs as a one-element list
		if type(self.pk_obj) == types.DictType:
			arg = self.pk_obj
		else:
			arg = [self.pk_obj]
		rows, self._idx = gmPG2.run_ro_queries (
			queries = [{'cmd': self.__class__._cmd_fetch_payload, 'args': arg}],
			get_col_idx = True
		)
		if len(rows) == 0:
			_log.error('[%s:%s]: no such instance' % (self.__class__.__name__, self.pk_obj))
			return False
		self._payload = rows[0]
		return True
	#--------------------------------------------------------
	def __noop(self):
		# placeholder callable, used by save_payload() when no
		# connection cleanup is required
		pass
	#--------------------------------------------------------
	def save(self, conn=None):
		# convenience alias for save_payload()
		return self.save_payload(conn = conn)
	#--------------------------------------------------------
	def save_payload(self, conn=None):
		"""Store updated values (if any) in database.
Optionally accepts a pre-existing connection - returns a tuple (, ) - True: success - False: an error occurred * data is (error, message) * for error meanings see gmPG2.run_rw_queries() """ if not self._is_modified: return (True, None) args = {} for field in self._idx.keys(): args[field] = self._payload[self._idx[field]] self.modified_payload = args close_conn = self.__noop if conn is None: conn = gmPG2.get_connection(readonly=False) close_conn = conn.close queries = [] for query in self.__class__._cmds_store_payload: queries.append({'cmd': query, 'args': args}) rows, idx = gmPG2.run_rw_queries ( link_obj = conn, queries = queries, return_data = True, get_col_idx = True ) # this can happen if: # - someone else updated the row so XMIN does not match anymore # - the PK went away (rows were deleted from under us) # - another WHERE condition of the UPDATE did not produce any rows to update # - savepoints are used since subtransactions may relevantly change the xmin/xmax ... if len(rows) == 0: return (False, (u'cannot update row', _('[%s:%s]: row not updated (nothing returned), row in use ?') % (self.__class__.__name__, self.pk_obj))) # update cached values from should-be-first-and-only result # row of last query, # update all fields returned such that computed # columns see their new values row = rows[0] for key in idx: try: self._payload[self._idx[key]] = row[idx[key]] except KeyError: conn.rollback() close_conn() _log.error('[%s:%s]: cannot update instance, XMIN refetch key mismatch on [%s]' % (self.__class__.__name__, self.pk_obj, key)) _log.error('payload keys: %s' % str(self._idx)) _log.error('XMIN refetch keys: %s' % str(idx)) _log.error(args) raise conn.commit() close_conn() self._is_modified = False # update to new "original" payload self.original_payload = {} for field in self._idx.keys(): self.original_payload[field] = self._payload[self._idx[field]] return (True, None) #============================================================ def jsonclasshintify(obj): # 
this should eventually be somewhere else """ turn the data into a list of dicts, adding "class hints". all objects get turned into dictionaries which the other end will interpret as "object", via the __jsonclass__ hint, as specified by the JSONRPC protocol standard. """ if isinstance(obj, list): return map(jsonclasshintify, obj) elif isinstance(obj, gmPG2.dbapi.tz.FixedOffsetTimezone): # this will get decoded as "from jsonobjproxy import {clsname}" # at the remote (client) end. res = {'__jsonclass__': ["jsonobjproxy.FixedOffsetTimezone"]} res['name'] = obj._name res['offset'] = jsonclasshintify(obj._offset) return res elif isinstance(obj, datetime.timedelta): # this will get decoded as "from jsonobjproxy import {clsname}" # at the remote (client) end. res = {'__jsonclass__': ["jsonobjproxy.TimeDelta"]} res['days'] = obj.days res['seconds'] = obj.seconds res['microseconds'] = obj.microseconds return res elif isinstance(obj, datetime.time): # this will get decoded as "from jsonobjproxy import {clsname}" # at the remote (client) end. res = {'__jsonclass__': ["jsonobjproxy.Time"]} res['hour'] = obj.hour res['minute'] = obj.minute res['second'] = obj.second res['microsecond'] = obj.microsecond res['tzinfo'] = jsonclasshintify(obj.tzinfo) return res elif isinstance(obj, datetime.datetime): # this will get decoded as "from jsonobjproxy import {clsname}" # at the remote (client) end. res = {'__jsonclass__': ["jsonobjproxy.DateTime"]} res['year'] = obj.year res['month'] = obj.month res['day'] = obj.day res['hour'] = obj.hour res['minute'] = obj.minute res['second'] = obj.second res['microsecond'] = obj.microsecond res['tzinfo'] = jsonclasshintify(obj.tzinfo) return res elif isinstance(obj, cBusinessDBObject): # this will get decoded as "from jsonobjproxy import {clsname}" # at the remote (client) end. 
res = {'__jsonclass__': ["jsonobjproxy.%s" % obj.__class__.__name__]} for k in obj.get_fields(): t = jsonclasshintify(obj[k]) res[k] = t print "props", res, dir(obj) for attribute in dir(obj): if not attribute.startswith("get_"): continue k = attribute[4:] if res.has_key(k): continue getter = getattr(obj, attribute, None) if callable(getter): res[k] = jsonclasshintify(getter()) return res return obj #============================================================ if __name__ == '__main__': if len(sys.argv) < 2: sys.exit() if sys.argv[1] != u'test': sys.exit() #-------------------------------------------------------- class cTestObj(cBusinessDBObject): _cmd_fetch_payload = None _cmds_store_payload = None _updatable_fields = [] #---------------------------------------------------- def get_something(self): pass #---------------------------------------------------- def set_something(self): pass #-------------------------------------------------------- from Gnumed.pycommon import gmI18N gmI18N.activate_locale() gmI18N.install_domain() data = { 'pk_field': 'bogus_pk', 'idx': {'bogus_pk': 0, 'bogus_field': 1, 'bogus_date': 2}, 'data': [-1, 'bogus_data', datetime.datetime.now()] } obj = cTestObj(row=data) #print obj['wrong_field'] #print jsonclasshintify(obj) #obj['wrong_field'] = 1 print obj.fields_as_dict() #============================================================ ./gnumed-server.19.6/server/pycommon/gmShellAPI.py0000644000175000017500000002433312272210420020161 0ustar ncqncq__doc__ = """GNUmed general tools.""" #=========================================================================== __author__ = "K. 
Hilbert " __license__ = "GPL v2 or later (details at http://www.gnu.org)" # stdlib import os import sys import logging import subprocess import shlex _log = logging.getLogger('gm.shell') #=========================================================================== def is_cmd_in_path(cmd=None): _log.debug('cmd: [%s]', cmd) dirname = os.path.dirname(cmd) _log.debug('dir: [%s]', dirname) if dirname != u'': _log.info('command with full or relative path, not searching in PATH for binary') return (None, None) env_paths = os.environ['PATH'] _log.debug('${PATH}: %s', env_paths) for path in env_paths.split(os.pathsep): candidate = os.path.join(path, cmd).encode(sys.getfilesystemencoding()) if os.access(candidate, os.X_OK): _log.debug('found [%s]', candidate) return (True, candidate.decode(sys.getfilesystemencoding())) else: _log.debug('not found: %s', candidate) _log.debug('command not found in PATH') return (False, None) #=========================================================================== def is_executable_by_wine(cmd=None): if not cmd.startswith('wine'): _log.debug('not a WINE call: %s', cmd) return (False, None) exe_path = cmd.encode(sys.getfilesystemencoding()) exe_path = exe_path[4:].strip().strip('"').strip() # [wine "/standard/unix/path/to/binary.exe"] ? if os.access(exe_path, os.R_OK): _log.debug('WINE call with UNIX path: %s', exe_path) return (True, cmd) # detect [winepath] found, full_winepath_path = is_cmd_in_path(cmd = r'winepath') if not found: _log.error('[winepath] not found, cannot check WINE call for Windows path conformance: %s', exe_path) return (False, None) # [wine "drive:\a\windows\path\to\binary.exe"] ? 
cmd_line = r'%s -u "%s"' % ( full_winepath_path.encode(sys.getfilesystemencoding()), exe_path ) _log.debug('converting Windows path to UNIX path: %s' % cmd_line) cmd_line = shlex.split(cmd_line) try: winepath = subprocess.Popen ( cmd_line, stdout = subprocess.PIPE, stderr = subprocess.PIPE, universal_newlines = True ) except OSError: _log.exception('cannot run ') return (False, None) stdout, stderr = winepath.communicate() full_path = stdout.strip('\r\n') _log.debug('UNIX path: %s', full_path) if winepath.returncode != 0: _log.error(' returned [%s], failed to convert path', winepath.returncode) return (False, None) if os.access(full_path, os.R_OK): _log.debug('WINE call with Windows path') return (True, cmd) _log.warning('Windows path [%s] not verifiable under UNIX: %s', exe_path, full_path) return (False, None) #=========================================================================== def detect_external_binary(binary=None): """ is the name of the executable with or without .exe/.bat""" _log.debug('searching for [%s]', binary) binary = binary.lstrip() # is it a sufficiently qualified, directly usable, explicit path ? if os.access(binary, os.X_OK): _log.debug('found: executable explicit path') return (True, binary) # can it be found in PATH ? found, full_path = is_cmd_in_path(cmd = binary) if found: if os.access(full_path, os.X_OK): _log.debug('found: executable in ${PATH}') return (True, full_path) # does it seem to be a call via WINE ? is_wine_call, full_path = is_executable_by_wine(cmd = binary) if is_wine_call: _log.debug('found: is valid WINE call') return (True, full_path) # maybe we can be a bit smart about Windows ? 
if os.name == 'nt': # try .exe (but not if already .bat or .exe) if not (binary.endswith('.exe') or binary.endswith('.bat')): exe_binary = binary + r'.exe' _log.debug('re-testing as %s', exe_binary) found_dot_exe_binary, full_path = detect_external_binary(binary = exe_binary) if found_dot_exe_binary: return (True, full_path) # not found with .exe, so try .bat: bat_binary = binary + r'.bat' _log.debug('re-testing as %s', bat_binary) found_bat_binary, full_path = detect_external_binary(binary = bat_binary) if found_bat_binary: return (True, full_path) else: _log.debug('not running under Windows, not testing .exe/.bat') return (False, None) #=========================================================================== def find_first_binary(binaries=None): found = False binary = None for cmd in binaries: _log.debug('looking for [%s]', cmd) if cmd is None: continue found, binary = detect_external_binary(binary = cmd) if found: break return (found, binary) #=========================================================================== def run_command_in_shell(command=None, blocking=False, acceptable_return_codes=None): """Runs a command in a subshell via standard-C system(). The shell command to run including command line options. This will make the code *block* until the shell command exits. It will likely only work on UNIX shells where "cmd &" makes sense. 
http://stackoverflow.com/questions/35817/how-to-escape-os-system-calls-in-python """ if acceptable_return_codes is None: acceptable_return_codes = [0] _log.debug('shell command >>>%s<<<', command) _log.debug('blocking: %s', blocking) _log.debug('acceptable return codes: %s', str(acceptable_return_codes)) # FIXME: command should be checked for shell exploits command = command.strip() if os.name == 'nt': # http://stackoverflow.com/questions/893203/bat-files-nonblocking-run-launch if blocking is False: if not command.startswith('start '): command = 'start "GNUmed" /B "%s"' % command # elif blocking is True: # if not command.startswith('start '): # command = 'start "GNUmed" /WAIT /B "%s"' % command else: # what the following hack does is this: the user indicated # whether she wants non-blocking external display of files # - the real way to go about this is to have a non-blocking command # in the line in the mailcap file for the relevant mime types # - as non-blocking may not be desirable when *not* displaying # files from within GNUmed the really right way would be to # add a "test" clause to the non-blocking mailcap entry which # yields true if and only if GNUmed is running # - however, this is cumbersome at best and not supported in # some mailcap implementations # - so we allow the user to attempt some control over the process # from within GNUmed by setting a configuration option # - leaving it None means to use the mailcap default or whatever # was specified in the command itself # - True means: tack " &" onto the shell command if necessary # - False means: remove " &" from the shell command if its there # - all this, of course, only works in shells which support # detaching jobs with " &" (so, most POSIX shells) if blocking is True: if command[-2:] == ' &': command = command[:-2] elif blocking is False: if command[-2:] != ' &': command += ' &' _log.info('running shell command >>>%s<<<', command) # FIXME: use subprocess.Popen() ret_val = 
os.system(command.encode(sys.getfilesystemencoding())) _log.debug('os.system() returned: [%s]', ret_val) exited_normally = False if not hasattr(os, 'WIFEXITED'): _log.error('platform does not support exit status differentiation') if ret_val in acceptable_return_codes: _log.info('os.system() return value contained in acceptable return codes') _log.info('continuing and hoping for the best') return True return exited_normally _log.debug('exited via exit(): %s', os.WIFEXITED(ret_val)) if os.WIFEXITED(ret_val): _log.debug('exit code: [%s]', os.WEXITSTATUS(ret_val)) exited_normally = (os.WEXITSTATUS(ret_val) in acceptable_return_codes) _log.debug('normal exit: %s', exited_normally) _log.debug('dumped core: %s', os.WCOREDUMP(ret_val)) _log.debug('stopped by signal: %s', os.WIFSIGNALED(ret_val)) if os.WIFSIGNALED(ret_val): try: _log.debug('STOP signal was: [%s]', os.WSTOPSIG(ret_val)) except AttributeError: _log.debug('platform does not support os.WSTOPSIG()') try: _log.debug('TERM signal was: [%s]', os.WTERMSIG(ret_val)) except AttributeError: _log.debug('platform does not support os.WTERMSIG()') return exited_normally #=========================================================================== def run_first_available_in_shell(binaries=None, args=None, blocking=False, run_last_one_anyway=False, acceptable_return_codes=None): found, binary = find_first_binary(binaries = binaries) if not found: _log.warning('cannot find any of: %s', binaries) if run_last_one_anyway: binary = binaries[-1] _log.debug('falling back to trying to run [%s] anyway', binary) else: return False return run_command_in_shell(command = '%s %s' % (binary, args), blocking = blocking, acceptable_return_codes = acceptable_return_codes) #=========================================================================== # main #--------------------------------------------------------------------------- if __name__ == '__main__': if len(sys.argv) < 2: sys.exit() if sys.argv[1] != u'test': sys.exit() 
logging.basicConfig(level = logging.DEBUG) #--------------------------------------------------------- def test_detect_external_binary(): found, path = detect_external_binary(binary = sys.argv[2]) if found: print "found as:", path else: print sys.argv[2], "not found" #--------------------------------------------------------- def test_run_command_in_shell(): print "-------------------------------------" print "running:", sys.argv[2] if run_command_in_shell(command=sys.argv[2], blocking=False): print "-------------------------------------" print "success" else: print "-------------------------------------" print "failure, consult log" #--------------------------------------------------------- def test_is_cmd_in_path(): print is_cmd_in_path(cmd = sys.argv[2]) #--------------------------------------------------------- def test_is_executable_by_wine(): print is_executable_by_wine(cmd = sys.argv[2]) #--------------------------------------------------------- test_run_command_in_shell() #test_detect_external_binary() #test_is_cmd_in_path() #test_is_executable_by_wine() #=========================================================================== ./gnumed-server.19.6/server/pycommon/gmPrinting.py0000644000175000017500000002546012272210420020354 0ustar ncqncq"""GNUmed printing.""" # ======================================================================= __author__ = "K.Hilbert " __license__ = 'GPL v2 or later (details at http://www.gnu.org)' # ======================================================================= import logging import sys import os import subprocess import codecs import time if __name__ == '__main__': sys.path.insert(0, '../../') from Gnumed.pycommon import gmShellAPI from Gnumed.pycommon import gmTools from Gnumed.pycommon import gmLog2 _log = logging.getLogger('gm.printing') known_printjob_types = [ u'medication_list', u'generic_document' ] external_print_APIs = [ u'gm-print_doc', u'os_startfile', # win, mostly u'gsprint', # win u'acrobat_reader', # win 
u'gtklp', # Linux u'Internet_Explorer', # win u'Mac_Preview' # MacOSX ] #======================================================================= # internal print API #----------------------------------------------------------------------- def print_files(filenames=None, jobtype=None, print_api=None): _log.debug('printing "%s": %s', jobtype, filenames) for fname in filenames: try: open(fname, 'r').close() except: _log.exception('cannot open [%s], aborting', fname) return False if jobtype not in known_printjob_types: print "unregistered print job type <%s>" % jobtype _log.warning('print job type "%s" not registered', jobtype) if print_api not in external_print_APIs: _log.warning('print API "%s" unknown, trying all', print_api) if print_api == u'os_startfile': return _print_files_by_os_startfile(filenames = filenames) elif print_api == u'gm-print_doc': return _print_files_by_shellscript(filenames = filenames, jobtype = jobtype) elif print_api == u'gsprint': return _print_files_by_gsprint_exe(filenames = filenames) elif print_api == u'acrobat_reader': return _print_files_by_acroread_exe(filenames = filenames) elif print_api == u'gtklp': return _print_files_by_gtklp(filenames = filenames) elif print_api == u'Internet_Explorer': return _print_files_by_IE(filenames = filenames) elif print_api == u'Mac_Preview': return _print_files_by_mac_preview(filenames = filenames) # else try all if (sys.platform == 'darwin') or (os.name == 'mac'): if _print_files_by_mac_preview(filenames = filenames): return True elif os.name == 'posix': if _print_files_by_gtklp(filenames = filenames): return True elif os.name == 'nt': if _print_files_by_shellscript(filenames = filenames, jobtype = jobtype): return True if _print_files_by_gsprint_exe(filenames = filenames): return True if _print_files_by_acroread_exe(filenames = filenames): return True if _print_files_by_os_startfile(filenames = filenames): return True if _print_files_by_IE(filenames = filenames): return True return False if 
_print_files_by_shellscript(filenames = filenames, jobtype = jobtype): return True return False #======================================================================= # external print APIs #----------------------------------------------------------------------- def _print_files_by_mac_preview(filenames=None): # if os.name != 'mac': # does not work if sys.platform != 'darwin': _log.debug('MacOSX only available under MacOSX/Darwin') return False for filename in filenames: cmd_line = [ r'open', # "open" must be in the PATH r'-a Preview', # action = Preview filename ] _log.debug('printing with %s' % cmd_line) try: mac_preview = subprocess.Popen(cmd_line) except OSError: _log.debug('cannot run ') return False mac_preview.communicate() if mac_preview.returncode != 0: _log.error(' returned [%s], failed to print', mac_preview.returncode) return False return True #----------------------------------------------------------------------- def _print_files_by_IE(filenames=None): if os.name != 'nt': _log.debug('Internet Explorer only available under Windows') return False try: from win32com import client as dde_client except ImportError: _log.exception(' Python module not available for use in printing') return False try: i_explorer = dde_client.Dispatch("InternetExplorer.Application") for filename in filenames: if i_explorer.Busy: time.sleep(1) i_explorer.Navigate(os.path.normpath(filename)) if i_explorer.Busy: time.sleep(1) i_explorer.Document.printAll() i_explorer.Quit() except: _log.exception('error calling IE via DDE') return False return True #----------------------------------------------------------------------- def _print_files_by_gtklp(filenames=None): # if os.name != 'posix': if sys.platform != 'linux2': _log.debug(' only available under Linux') return False cmd_line = [ r'gtklp', r'-i', r'-# 1' ] cmd_line.extend(filenames) _log.debug('printing with %s' % cmd_line) try: gtklp = subprocess.Popen(cmd_line) except OSError: _log.debug('cannot run ') return False 
gtklp.communicate() if gtklp.returncode != 0: _log.error(' returned [%s], failed to print', gtklp.returncode) return False return True #----------------------------------------------------------------------- def _print_files_by_gsprint_exe(filenames=None): """Use gsprint.exe from Ghostscript tools. Windows only. - docs: http://pages.cs.wisc.edu/~ghost/gsview/gsprint.htm - download: http://www.cs.wisc.edu/~ghost/ """ if os.name != 'nt': _log.debug(' only available under Windows') return False conf_filename = gmTools.get_unique_filename ( prefix = 'gm2gsprint-', suffix = '.cfg' ).encode(sys.getfilesystemencoding()) for filename in filenames: conf_file = codecs.open(conf_filename, 'wb', 'utf8') conf_file.write('-color\n') conf_file.write('-query\n') # printer setup dialog conf_file.write('-all\n') # all pages conf_file.write('-copies 1\n') conf_file.write('%s\n' % os.path.normpath(filename)) conf_file.close() cmd_line = [ r'gsprint.exe', # "gsprint.exe" must be in the PATH r'-config "%s"' % conf_filename ] _log.debug('printing with %s' % cmd_line) try: gsprint = subprocess.Popen(cmd_line) except OSError: _log.debug('cannot run ') return False gsprint.communicate() if gsprint.returncode != 0: _log.error(' returned [%s], failed to print', gsprint.returncode) return False return True #----------------------------------------------------------------------- def _print_files_by_acroread_exe(filenames): """Use Adobe Acrobat Reader. Windows only. 
- docs: http://www.robvanderwoude.com/printfiles.php#PrintPDF """ if os.name != 'nt': _log.debug('Acrobat Reader only used under Windows') return False for filename in filenames: cmd_line = [ r'AcroRd32.exe', # "AcroRd32.exe" must be in the PATH r'/s', # no splash r'/o', # no open-file dialog r'/h', # minimized r'/p', # go straight to printing dialog os.path.normpath(filename) ] _log.debug('printing with %s' % cmd_line) try: acroread = subprocess.Popen(cmd_line) except OSError: _log.debug('cannot run ') cmd_line[0] = r'acroread.exe' # "acroread.exe" must be in the PATH _log.debug('printing with %s' % cmd_line) try: acroread = subprocess.Popen(cmd_line) except OSError: _log.debug('cannot run ') return False acroread.communicate() if acroread.returncode != 0: _log.error('Acrobat Reader returned [%s], failed to print', acroread.returncode) return False return True #----------------------------------------------------------------------- def _print_files_by_os_startfile(filenames=None): try: os.startfile except AttributeError: _log.error('platform does not support "os.startfile()"') return False _log.debug('printing [%s]', filenames) for filename in filenames: fname = os.path.normcase(os.path.normpath(filename)) _log.debug('%s -> %s', filename, fname) try: try: os.startfile(fname, 'print') except WindowsError, e: _log.exception('no action defined for this type of file') if e.winerror == 1155: # try action os.startfile(fname) except: _log.exception('os.startfile() failed') gmLog2.log_stack_trace() return False return True #----------------------------------------------------------------------- def _print_files_by_shellscript(filenames=None, jobtype=None): paths = gmTools.gmPaths() local_script = os.path.join(paths.local_base_dir, '..', 'external-tools', 'gm-print_doc') #candidates = [u'gm-print_doc', u'gm-print_doc.bat', local_script, u'gm-print_doc.bat'] candidates = [u'gm-print_doc', local_script, u'gm-print_doc.bat'] found, binary = 
gmShellAPI.find_first_binary(binaries = candidates) if not found: binary = r'gm-print_doc.bat' cmd_line = [ binary, jobtype ] cmd_line.extend(filenames) _log.debug('printing with %s', cmd_line) try: gm_print_doc = subprocess.Popen(cmd_line) except OSError: _log.debug('cannot run ') return False gm_print_doc.communicate() if gm_print_doc.returncode != 0: _log.error(' returned [%s], failed to print', gm_print_doc.returncode) return False return True # args = u' %s %s' % (jobtype, filename) # success = gmShellAPI.run_first_available_in_shell ( # binaries = candidates, # args = args, # blocking = True, # run_last_one_anyway = True # ) # # if success: # return True # # _log.error('print command failed') # return False #======================================================================= # main #----------------------------------------------------------------------- if __name__ == '__main__': if len(sys.argv) < 2: sys.exit() if sys.argv[1] != 'test': sys.exit() from Gnumed.pycommon import gmLog2 from Gnumed.pycommon import gmI18N gmI18N.activate_locale() gmI18N.install_domain() #-------------------------------------------------------------------- def test_print_files(): return print_files(filenames = [sys.argv[2]], jobtype = sys.argv[3]) #-------------------------------------------------------------------- def test_print_files_by_shellscript(): print_files(filenames = [sys.argv[2], sys.argv[2]], jobtype = u'generic_document', print_api = 'gm-print_doc') #-------------------------------------------------------------------- def test_print_files_by_gtklp(): print_files(filenames = [sys.argv[2], sys.argv[2]], jobtype = u'generic_document', print_api = u'gtklp') #-------------------------------------------------------------------- def test_print_files_by_mac_preview(): print "testing printing via Mac Preview" _print_files_by_mac_preview(filenames = [sys.argv[0]]) #-------------------------------------------------------------------- print test_print_files() 
#test_print_files_by_gtklp() #test_print_files_by_mac_preview() # ======================================================================= ./gnumed-server.19.6/server/pycommon/gmMimeLib.py0000644000175000017500000002442512272210420020100 0ustar ncqncq# -*- coding: utf-8 -*- """This module encapsulates mime operations. """ #======================================================================================= __author__ = "Karsten Hilbert " __license__ = "GPL" # stdlib import sys import os import mailcap import mimetypes import subprocess import shutil import logging # GNUmed if __name__ == '__main__': sys.path.insert(0, '../../') import gmShellAPI, gmTools, gmCfg2 _log = logging.getLogger('gm.docs') #======================================================================================= def guess_mimetype(aFileName = None): """Guess mime type of arbitrary file. filenames are supposed to be in Unicode """ worst_case = "application/octet-stream" # 1) use Python libextractor try: import extractor xtract = extractor.Extractor() props = xtract.extract(filename = aFileName) for prop, val in props: if (prop == 'mimetype') and (val != worst_case): return val except ImportError: _log.exception('Python wrapper for libextractor not installed.') ret_code = -1 # 2) use "file" system command # -i get mime type # -b don't display a header mime_guesser_cmd = u'file -i -b "%s"' % aFileName # this only works on POSIX with 'file' installed (which is standard, however) # it might work on Cygwin installations aPipe = os.popen(mime_guesser_cmd.encode(sys.getfilesystemencoding()), 'r') if aPipe is None: _log.debug("cannot open pipe to [%s]" % mime_guesser_cmd) else: pipe_output = aPipe.readline().replace('\n', '').strip() ret_code = aPipe.close() if ret_code is None: _log.debug('[%s]: <%s>' % (mime_guesser_cmd, pipe_output)) if pipe_output not in [u'', worst_case]: return pipe_output else: _log.error('[%s] on %s (%s): failed with exit(%s)' % (mime_guesser_cmd, os.name, sys.platform, 
ret_code)) # 3) use "extract" shell level libextractor wrapper mime_guesser_cmd = 'extract -p mimetype "%s"' % aFileName aPipe = os.popen(mime_guesser_cmd.encode(sys.getfilesystemencoding()), 'r') if aPipe is None: _log.debug("cannot open pipe to [%s]" % mime_guesser_cmd) else: pipe_output = aPipe.readline()[11:].replace('\n', '').strip() ret_code = aPipe.close() if ret_code is None: _log.debug('[%s]: <%s>' % (mime_guesser_cmd, pipe_output)) if pipe_output not in [u'', worst_case]: return pipe_output else: _log.error('[%s] on %s (%s): failed with exit(%s)' % (mime_guesser_cmd, os.name, sys.platform, ret_code)) # If we and up here we either have an insufficient systemwide # magic number file or we suffer from a deficient operating system # alltogether. It can't get much worse if we try ourselves. _log.info("OS level mime detection failed, falling back to built-in magic") import gmMimeMagic mime_type = gmTools.coalesce(gmMimeMagic.file(aFileName), worst_case) del gmMimeMagic _log.debug('"%s" -> <%s>' % (aFileName, mime_type)) return mime_type #----------------------------------------------------------------------------------- def get_viewer_cmd(aMimeType = None, aFileName = None, aToken = None): """Return command for viewer for this mime type complete with this file""" if aFileName is None: _log.error("You should specify a file name for the replacement of %s.") # last resort: if no file name given replace %s in original with literal '%s' # and hope for the best - we certainly don't want the module default "/dev/null" aFileName = """%s""" mailcaps = mailcap.getcaps() (viewer, junk) = mailcap.findmatch(mailcaps, aMimeType, key = 'view', filename = '%s' % aFileName) # FIXME: we should check for "x-token" flags _log.debug("<%s> viewer: [%s]" % (aMimeType, viewer)) return viewer #----------------------------------------------------------------------------------- def get_editor_cmd(mimetype=None, filename=None): if filename is None: _log.error("You should specify a file 
name for the replacement of %s.") # last resort: if no file name given replace %s in original with literal '%s' # and hope for the best - we certainly don't want the module default "/dev/null" filename = """%s""" mailcaps = mailcap.getcaps() (editor, junk) = mailcap.findmatch(mailcaps, mimetype, key = 'edit', filename = '%s' % filename) # FIXME: we should check for "x-token" flags _log.debug("<%s> editor: [%s]" % (mimetype, editor)) return editor #----------------------------------------------------------------------------------- def guess_ext_by_mimetype(mimetype=''): """Return file extension based on what the OS thinks a file of this mimetype should end in.""" # ask system first ext = mimetypes.guess_extension(mimetype) if ext is not None: _log.debug('<%s>: *.%s' % (mimetype, ext)) return ext _log.error("<%s>: no suitable file extension known to the OS" % mimetype) # try to help the OS a bit cfg = gmCfg2.gmCfgData() ext = cfg.get ( group = u'extensions', option = mimetype, source_order = [('user-mime', 'return'), ('system-mime', 'return')] ) if ext is not None: _log.debug('<%s>: *.%s (%s)' % (mimetype, ext, candidate)) return ext _log.error("<%s>: no suitable file extension found in config files" % mimetype) return ext #----------------------------------------------------------------------------------- def guess_ext_for_file(aFile=None): if aFile is None: return None (path_name, f_ext) = os.path.splitext(aFile) if f_ext != '': return f_ext # try to guess one mime_type = guess_mimetype(aFile) f_ext = guess_ext_by_mimetype(mime_type) if f_ext is None: _log.error('unable to guess file extension for mime type [%s]' % mime_type) return None return f_ext #----------------------------------------------------------------------------------- _system_startfile_cmd = None open_cmds = { 'xdg-open': 'xdg-open "%s"', # nascent standard on Linux 'kfmclient': 'kfmclient exec "%s"', # KDE 'gnome-open': 'gnome-open "%s"', # GNOME 'exo-open': 'exo-open "%s"', 'op': 'op "%s"', 
'open': 'open "%s"' # MacOSX: "open -a AppName file" (-a allows to override the default app for the file type) #'run-mailcap' #'explorer' } def _get_system_startfile_cmd(filename): global _system_startfile_cmd if _system_startfile_cmd == u'': return False, None if _system_startfile_cmd is not None: return True, _system_startfile_cmd % filename open_cmd_candidates = ['xdg-open', 'kfmclient', 'gnome-open', 'exo-open', 'op', 'open'] for candidate in open_cmd_candidates: found, binary = gmShellAPI.detect_external_binary(binary = candidate) if not found: continue _system_startfile_cmd = open_cmds[candidate] _log.info('detected local startfile cmd: [%s]', _system_startfile_cmd) return True, _system_startfile_cmd % filename _system_startfile_cmd = u'' return False, None #----------------------------------------------------------------------------------- def convert_file(filename=None, target_mime=None, target_filename=None, target_extension=None): """Convert file from one format into another. target_mime: a mime type """ if target_extension is None: tmp, target_extension = os.path.splitext(target_filename) base_name = u'gm-convert_file' paths = gmTools.gmPaths() local_script = os.path.join(paths.local_base_dir, '..', 'external-tools', base_name) candidates = [ base_name, local_script ] #, base_name + u'.bat' found, binary = gmShellAPI.find_first_binary(binaries = candidates) if not found: binary = base_name# + r'.bat' cmd_line = [ binary, filename, target_mime, target_extension.strip('.'), target_filename ] _log.debug('converting: %s', cmd_line) try: gm_convert = subprocess.Popen(cmd_line) except OSError: _log.debug('cannot run <%s(.bat)>', base_name) return False gm_convert.communicate() if gm_convert.returncode != 0: _log.error('<%s(.bat)> returned [%s], failed to convert', base_name, gm_convert.returncode) return False return True #----------------------------------------------------------------------------------- def call_viewer_on_file(aFile = None, block=None): 
"""Try to find an appropriate viewer with all tricks and call it. block: try to detach from viewer or not, None means to use mailcap default """ # does this file exist, actually ? try: open(aFile).close() except: _log.exception('cannot read [%s]', aFile) msg = _('[%s] is not a readable file') % aFile return False, msg # try to detect any of the UNIX openers found, startfile_cmd = _get_system_startfile_cmd(aFile) if found: if gmShellAPI.run_command_in_shell(command = startfile_cmd, blocking = block): return True, '' mime_type = guess_mimetype(aFile) viewer_cmd = get_viewer_cmd(mime_type, aFile) if viewer_cmd is not None: if gmShellAPI.run_command_in_shell(command=viewer_cmd, blocking=block): return True, '' _log.warning("no viewer found via standard mailcap system") if os.name == "posix": _log.warning("you should add a viewer for this mime type to your mailcap file") _log.info("let's see what the OS can do about that") # does the file already have an extension ? (path_name, f_ext) = os.path.splitext(aFile) # no if f_ext in ['', '.tmp']: # try to guess one f_ext = guess_ext_by_mimetype(mime_type) if f_ext is None: _log.warning("no suitable file extension found, trying anyway") file_to_display = aFile f_ext = '?unknown?' 
else: file_to_display = aFile + f_ext shutil.copyfile(aFile, file_to_display) # yes else: file_to_display = aFile file_to_display = os.path.normpath(file_to_display) _log.debug("file %s (ext %s) -> file %s" % (aFile, mime_type, f_ext, file_to_display)) try: os.startfile(file_to_display) except: _log.exception('os.startfile(%s) failed', file_to_display) msg = _("Unable to display the file:\n\n" " [%s]\n\n" "Your system does not seem to have a (working)\n" "viewer registered for the file type\n" " [%s]" ) % (file_to_display, mime_type) return False, msg # don't kill the file from under the (possibly async) viewer # if file_to_display != aFile: # os.remove(file_to_display) return True, '' #======================================================================================= if __name__ == "__main__": if len(sys.argv) > 1 and sys.argv[1] == u'test': filename = sys.argv[2] _get_system_startfile_cmd(filename) print _system_startfile_cmd #print guess_mimetype(filename) #print get_viewer_cmd(guess_mimetype(filename), filename) #print guess_ext_by_mimetype(mimetype=filename) ./gnumed-server.19.6/server/pycommon/__init__.py0000644000175000017500000000025512272210420017770 0ustar ncqncq #===================================================================== # $Log: __init__.py,v $ # Revision 1.1 2004-02-25 09:30:13 ncq # - moved here from python-common # ./gnumed-server.19.6/server/pycommon/gmI18N.py0000644000175000017500000004166312272210420017244 0ustar ncqncq"""GNUmed client internationalization/localization. All i18n/l10n issues should be handled through this modules. Theory of operation: To activate proper locale settings and translation services you need to - import this module - call activate_locale() - call install_domain() The translating method gettext.gettext() will then be installed into the global (!) namespace as _(). Your own modules thus need not do _anything_ (not even import gmI18N) to have _() available to them for translating strings. 
You need to make sure, however, that gmI18N is imported in your main module before any of the modules using it. In order to resolve circular references involving modules that absolutely _have_ to be imported before this module you can explicitly import gmI18N into them at the very beginning. The text domain (i.e. the name of the message catalog file) is derived from the name of the main executing script unless explicitly passed to install_domain(). The language you want to translate to is derived from environment variables by the locale system unless explicitly passed to install_domain(). This module searches for message catalog files in 3 main locations: - standard POSIX places (/usr/share/locale/ ...) - below "${YOURAPPNAME_DIR}/po/" - below "/../po/" For DOS/Windows I don't know of standard places so probably only the last option will work. I don't know a thing about classic Mac behaviour. New Macs are POSIX, of course. It will then try to install candidates and *verify* whether the translation works by checking for the translation of a tag within itself (this is similar to the self-compiling compiler inserting a backdoor into its self-compiled copies). If none of this works it will fall back to making _() a noop. @copyright: authors """ #=========================================================================== __author__ = "H. Herb , I. Haywood , K. 
Hilbert " __license__ = "GPL v2 or later (details at http://www.gnu.org)" # stdlib import sys, os.path, os, re as regex, locale, gettext, logging, codecs _log = logging.getLogger('gm.i18n') system_locale = '' system_locale_level = {} _translate_original = lambda x:x _substitutes_regex = regex.compile(r'%\(.+?\)s') # ********************************************************** # == do not remove this line =============================== # it is needed to check for successful installation of # the desired message catalog # ********************************************************** __orig_tag__ = u'Translate this or i18n into will not work properly !' # ********************************************************** # ********************************************************** # Q: I can't use non-ascii characters in labels and menus. # A: This can happen if your Python's system encoding is ASCII and # wxPython is non-unicode. Edit/create the file sitecustomize.py # (should be somewhere in your PYTHONPATH), and put these magic lines: # # import sys # sys.setdefaultencoding('iso8859-1') # replace with encoding you want to be the default one #=========================================================================== def __split_locale_into_levels(): """Split locale into language, country and variant parts. 
- we have observed the following formats in the wild: - de_DE@euro - ec_CA.UTF-8 - en_US:en - German_Germany.1252 """ _log.debug('splitting canonical locale [%s] into levels', system_locale) global system_locale_level system_locale_level['full'] = system_locale # trim '@' part system_locale_level['country'] = regex.split('@|:|\.', system_locale, 1)[0] # trim '_@' part system_locale_level['language'] = system_locale.split('_', 1)[0] _log.debug('system locale levels: %s', system_locale_level) #--------------------------------------------------------------------------- def __log_locale_settings(message=None): _setlocale_categories = {} for category in 'LC_ALL LC_CTYPE LC_COLLATE LC_TIME LC_MONETARY LC_MESSAGES LC_NUMERIC'.split(): try: _setlocale_categories[category] = getattr(locale, category) except: _log.warning('this OS does not have locale.%s', category) _getlocale_categories = {} for category in 'LC_CTYPE LC_COLLATE LC_TIME LC_MONETARY LC_MESSAGES LC_NUMERIC'.split(): try: _getlocale_categories[category] = getattr(locale, category) except: pass if message is not None: _log.debug(message) _log.debug('current locale settings:') _log.debug('locale.get_locale(): %s' % str(locale.getlocale())) for category in _getlocale_categories.keys(): _log.debug('locale.get_locale(%s): %s' % (category, locale.getlocale(_getlocale_categories[category]))) for category in _setlocale_categories.keys(): _log.debug('(locale.set_locale(%s): %s)' % (category, locale.setlocale(_setlocale_categories[category]))) try: _log.debug('locale.getdefaultlocale() - default (user) locale: %s' % str(locale.getdefaultlocale())) except ValueError: _log.exception('the OS locale setup seems faulty') _log.debug('encoding sanity check (also check "locale.nl_langinfo(CODESET)" below):') pref_loc_enc = locale.getpreferredencoding(do_setlocale=False) loc_enc = locale.getlocale()[1] py_str_enc = sys.getdefaultencoding() sys_fs_enc = sys.getfilesystemencoding() _log.debug('sys.getdefaultencoding(): [%s]' % 
py_str_enc) _log.debug('locale.getpreferredencoding(): [%s]' % pref_loc_enc) _log.debug('locale.getlocale()[1]: [%s]' % loc_enc) _log.debug('sys.getfilesystemencoding(): [%s]' % sys_fs_enc) if loc_enc is not None: loc_enc = loc_enc.upper() loc_enc_compare = loc_enc.replace(u'-', u'') else: loc_enc_compare = loc_enc if pref_loc_enc.upper().replace(u'-', u'') != loc_enc_compare: _log.warning('encoding suggested by locale (%s) does not match encoding currently set in locale (%s)' % (pref_loc_enc, loc_enc)) _log.warning('this might lead to encoding errors') for enc in [pref_loc_enc, loc_enc, py_str_enc, sys_fs_enc]: if enc is not None: try: codecs.lookup(enc) _log.debug(' module CAN handle encoding [%s]' % enc) except LookupError: _log.warning(' module can NOT handle encoding [%s]' % enc) _log.debug('on Linux you can determine a likely candidate for the encoding by running "locale charmap"') _log.debug('locale related environment variables (${LANG} is typically used):') for var in 'LANGUAGE LC_ALL LC_CTYPE LANG'.split(): try: _log.debug('${%s}=%s' % (var, os.environ[var])) except KeyError: _log.debug('${%s} not set' % (var)) _log.debug('database of locale conventions:') data = locale.localeconv() for key in data.keys(): if loc_enc is None: _log.debug(u'locale.localeconv(%s): %s', key, data[key]) else: try: _log.debug(u'locale.localeconv(%s): %s', key, unicode(data[key])) except UnicodeDecodeError: _log.debug(u'locale.localeconv(%s): %s', key, unicode(data[key], loc_enc)) _nl_langinfo_categories = {} for category in 'CODESET D_T_FMT D_FMT T_FMT T_FMT_AMPM RADIXCHAR THOUSEP YESEXPR NOEXPR CRNCYSTR ERA ERA_D_T_FMT ERA_D_FMT ALT_DIGITS'.split(): try: _nl_langinfo_categories[category] = getattr(locale, category) except: _log.warning('this OS does not support nl_langinfo category locale.%s' % category) try: for category in _nl_langinfo_categories.keys(): if loc_enc is None: _log.debug('locale.nl_langinfo(%s): %s' % (category, 
locale.nl_langinfo(_nl_langinfo_categories[category]))) else: try: _log.debug(u'locale.nl_langinfo(%s): %s', category, unicode(locale.nl_langinfo(_nl_langinfo_categories[category]))) except UnicodeDecodeError: _log.debug(u'locale.nl_langinfo(%s): %s', category, unicode(locale.nl_langinfo(_nl_langinfo_categories[category]), loc_enc)) except: _log.exception('this OS does not support nl_langinfo') _log.debug('gmI18N.get_encoding(): %s', get_encoding()) #--------------------------------------------------------------------------- def _translate_protected(term): """This wraps _(). It protects against translation errors such as a different number of "%s". """ translation = _translate_original(term) # different number of %s substitutes ? if translation.count(u'%s') != term.count(u'%s'): _log.error('count("%s") mismatch, returning untranslated string') _log.error('original : %s', term) _log.error('translation: %s', translation) return term term_substitutes = _substitutes_regex.findall(term) trans_substitutes = _substitutes_regex.findall(translation) # different number of %(...)s substitutes ? if len(term_substitutes) != len(trans_substitutes): _log.error('count("%(...)s") mismatch, returning untranslated string') _log.error('original : %s', term) _log.error('translation: %s', translation) return term # different %(...)s substitutes ? 
if set(term_substitutes) != set(trans_substitutes): _log.error('"%(...)s" name mismatch, returning untranslated string') _log.error('original : %s', term) _log.error('translation: %s', translation) return term return translation #--------------------------------------------------------------------------- # external API #--------------------------------------------------------------------------- def activate_locale(): """Get system locale from environment.""" global system_locale # logging state of affairs __log_locale_settings('unmodified startup locale settings (should be [C])') # activate user-preferred locale loc, enc = None, None try: # check whether already set loc, loc_enc = locale.getlocale() if loc is None: loc = locale.setlocale(locale.LC_ALL, '') _log.debug("activating user-default locale with returns: [%s]" % loc) else: _log.info('user-default locale already activated') loc, loc_enc = locale.getlocale() except AttributeError: _log.exception('Windows does not support locale.LC_ALL') except: _log.exception('error activating user-default locale') # logging state of affairs __log_locale_settings('locale settings after activating user-default locale') # did we find any locale setting ? assume en_EN if not if loc in [None, 'C']: _log.error('the current system locale is still [None] or [C], assuming [en_EN]') system_locale = "en_EN" else: system_locale = loc # generate system locale levels __split_locale_into_levels() return True #--------------------------------------------------------------------------- def install_domain(domain=None, language=None, prefer_local_catalog=False): """Install a text domain suitable for the main script.""" # text domain directly specified ? 
if domain is None: _log.info('domain not specified, deriving from script name') # get text domain from name of script domain = os.path.splitext(os.path.basename(sys.argv[0]))[0] _log.info('text domain is [%s]' % domain) # http://www.opengroup.org/onlinepubs/009695399/basedefs/xbd_chap08.html _log.debug('searching message catalog file for system locale [%s]' % system_locale) for env_var in ['LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG']: tmp = os.getenv(env_var) if env_var is None: _log.debug('${%s} not set' % env_var) else: _log.debug('${%s} = [%s]' % (env_var, tmp)) if language is not None: _log.info('explicit setting of ${LANG} requested: [%s]' % language) _log.info('this will override the system locale language setting') os.environ['LANG'] = language # search for message catalog candidates = [] # - locally if prefer_local_catalog: _log.debug('preferring local message catalog') # - one level above path to binary # last resort for inferior operating systems such as DOS/Windows # strip one directory level # this is a rather neat trick :-) loc_dir = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '..', 'po')) _log.debug('looking above binary install directory [%s]' % loc_dir) candidates.append(loc_dir) # - in path to binary loc_dir = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), 'po')) _log.debug('looking in binary install directory [%s]' % loc_dir) candidates.append(loc_dir) # - standard places if os.name == 'posix': _log.debug('system is POSIX, looking in standard locations (see Python Manual)') # if this is reported to segfault/fail/except on some # systems we may have to assume "sys.prefix/share/locale/" candidates.append(gettext.bindtextdomain(domain)) else: _log.debug('No use looking in standard POSIX locations - not a POSIX system.') # - $(_DIR)/ env_key = "%s_DIR" % os.path.splitext(os.path.basename(sys.argv[0]))[0].upper() _log.debug('looking at ${%s}' % env_key) if os.environ.has_key(env_key): loc_dir = 
os.path.abspath(os.path.join(os.environ[env_key], 'po')) _log.debug('${%s} = "%s" -> [%s]' % (env_key, os.environ[env_key], loc_dir)) candidates.append(loc_dir) else: _log.info("${%s} not set" % env_key) # - locally if not prefer_local_catalog: # - one level above path to binary # last resort for inferior operating systems such as DOS/Windows # strip one directory level # this is a rather neat trick :-) loc_dir = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '..', 'po')) _log.debug('looking above binary install directory [%s]' % loc_dir) candidates.append(loc_dir) # - in path to binary loc_dir = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), 'po' )) _log.debug('looking in binary install directory [%s]' % loc_dir) candidates.append(loc_dir) # now try to actually install it for candidate in candidates: _log.debug('trying [%s](/%s/LC_MESSAGES/%s.mo)', candidate, system_locale, domain) if not os.path.exists(candidate): continue try: gettext.install(domain, candidate, unicode=1) except: _log.exception('installing text domain [%s] failed from [%s]', domain, candidate) continue global _ # does it translate ? if _(__orig_tag__) == __orig_tag__: _log.debug('does not translate: [%s] => [%s]', __orig_tag__, _(__orig_tag__)) continue else: _log.debug('found msg catalog: [%s] => [%s]', __orig_tag__, _(__orig_tag__)) import __builtin__ global _translate_original _translate_original = __builtin__._ __builtin__._ = _translate_protected return True # 5) install a dummy translation class _log.warning("falling back to NullTranslations() class") # this shouldn't fail dummy = gettext.NullTranslations() dummy.install() return True #=========================================================================== _encoding_mismatch_already_logged = False _current_encoding = None def get_encoding(): """Try to get a sane encoding. On MaxOSX locale.setlocale(locale.LC_ALL, '') does not have the desired effect, so that locale.getlocale()[1] still returns None. 
So in that case try to fallback to locale.getpreferredencoding(). - what Python itself uses to convert string <-> unicode when no other encoding was specified - ascii by default - can be set in site.py and sitecustomize.py - what the current locale is *actually* using as the encoding for text conversion - what the current locale would *recommend* using as the encoding for text conversion """ global _current_encoding if _current_encoding is not None: return _current_encoding enc = sys.getdefaultencoding() if enc != 'ascii': _current_encoding = enc return _current_encoding enc = locale.getlocale()[1] if enc is not None: _current_encoding = enc return _current_encoding global _encoding_mismatch_already_logged if not _encoding_mismatch_already_logged: _log.debug('*actual* encoding of locale is None, using encoding *recommended* by locale') _encoding_mismatch_already_logged = True return locale.getpreferredencoding(do_setlocale=False) #=========================================================================== # Main #--------------------------------------------------------------------------- if __name__ == "__main__": if len(sys.argv) == 1: sys.exit() if sys.argv[1] != u'test': sys.exit() logging.basicConfig(level = logging.DEBUG) print "======================================================================" print "GNUmed i18n" print "" print "authors:", __author__ print "license:", __license__, "; version:", __version__ print "======================================================================" activate_locale() print "system locale: ", system_locale, "; levels:", system_locale_level print "likely encoding:", get_encoding() if len(sys.argv) > 1: install_domain(domain = sys.argv[2]) else: install_domain() # ******************************************************** # == do not remove this line ============================= # it is needed to check for successful installation of # the desired message catalog # ******************************************************** tmp 
= _('Translate this or i18n into will not work properly !') # ******************************************************** # ******************************************************** #===================================================================== ./gnumed-server.19.6/server/pycommon/gmExceptions.py0000644000175000017500000000637312272210420020705 0ustar ncqncq############################################################################ # # gmExceptions - classes for exceptions gnumed modules may throw # -------------------------------------------------------------------------- # # @author: Dr. Horst Herb # @copyright: author # @license: GPL v2 or later (details at http://www.gnu.org) # @dependencies: nil # @change log: # 07.02.2002 hherb first draft, untested ############################################################################ class AccessDenied(Exception): def __init__(self, msg, source=None, code=None, details=None): self.errmsg = msg self.source = source self.code = code self.details = details #---------------------------------- def __str__(self): txt = self.errmsg if self.source is not None: txt += u'\nSource: %s' % self.source if self.code is not None: txt += u'\nCode: %s' % self.code if self.details is not None: txt += u'\n%s' % self.details return txt #---------------------------------- def __repr__(self): txt = self.errmsg if self.source is not None: txt += u'\nSource: %s' % source if self.code is not None: txt += u'\nCode: %s' % self.code if self.details is not None: txt += u'\n%s' % self.details return txt #------------------------------------------------------------ class DatabaseObjectInUseError(Exception): def __init__(self, msg): self.errmsg = msg def __str__(self): return self.errmsg class ConnectionError(Exception): #raised whenever the database backend connection fails def __init__(self, errmsg): self.errmsg=errmsg def __str__(self): return self.errmsg class ConfigError(Exception): #raised whenever a configuration error occurs def 
__init__(self, errmsg): self.errmsg=errmsg def __str__(self): return self.errmsg class NoGuiError(Exception): def __init__(self, errmsg): self.errmsg=errmsg def __str__(self): return self.errmsg class PureVirtualFunction(Exception): #raised whenever the database backend connection fails def __init__(self, errmsg=None): if errmsg is not None: self.errmsg=errmsg else: self.errmsg="Attempt to call a pure virtual function!" def __str__(self): return self.errmsg #------------------------------------------------------------ # constructor errors class ConstructorError(Exception): """Raised when a constructor fails.""" def __init__(self, errmsg = None): if errmsg is None: self.errmsg = "%s.__init__() failed" % self.__class__.__name__ else: self.errmsg = errmsg def __str__(self): return self.errmsg # business DB-object exceptions class NoSuchBusinessObjectError(ConstructorError): """Raised when a business db-object can not be found.""" def __init__(self, errmsg = None): if errmsg is None: self.errmsg = "no such business DB-object found" else: self.errmsg = errmsg def __str__(self): return self.errmsg #------------------------------------------------------------ class InvalidInputError(Exception): """Raised by business layers when an attempt is made to input invalid data""" def __init__(self, errmsg = None): if errmsg is None: self.errmsg = "%s.__init__() failed" % self.__class__.__name__ else: self.errmsg = errmsg def __str__(self): return self.errmsg #===================================================================== ./gnumed-server.19.6/server/pycommon/gmCfg.py0000644000175000017500000004616512272210420017266 0ustar ncqncq"""GNUmed configuration handling. This source of configuration information is supported: - database tables Theory of operation: It is helpful to have a solid log target set up before importing this module in your code. This way you will be able to see even those log messages generated during module import. 
Once your software has established database connectivity you can set up a config source from the database. You can limit the option applicability by the constraints "workplace", "user", and "cookie". The basic API for handling items is get()/set(). The database config objects auto-sync with the backend. @copyright: GPL v2 or later """ # TODO: # - optional arg for set -> type #================================================================== __author__ = "Karsten Hilbert " # standard modules import sys, types, cPickle, decimal, logging, re as regex # gnumed modules if __name__ == '__main__': sys.path.insert(0, '../../') from Gnumed.pycommon import gmPG2, gmTools _log = logging.getLogger('gm.cfg') # don't change this without knowing what you do as # it will already be in many databases cfg_DEFAULT = "xxxDEFAULTxxx" #================================================================== def get_all_options(order_by=None): if order_by is None: order_by = u'' else: order_by = u'ORDER BY %s' % order_by cmd = u""" SELECT * FROM ( SELECT vco.*, cs.value FROM cfg.v_cfg_options vco JOIN cfg.cfg_string cs ON (vco.pk_cfg_item = cs.fk_item) UNION ALL SELECT vco.*, cn.value::text FROM cfg.v_cfg_options vco JOIN cfg.cfg_numeric cn ON (vco.pk_cfg_item = cn.fk_item) UNION ALL SELECT vco.*, csa.value::text FROM cfg.v_cfg_options vco JOIN cfg.cfg_str_array csa ON (vco.pk_cfg_item = csa.fk_item) UNION ALL SELECT vco.*, cd.value::text FROM cfg.v_cfg_options vco JOIN cfg.cfg_data cd ON (vco.pk_cfg_item = cd.fk_item) ) as option_list %s""" % order_by rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False) return rows #================================================================== # FIXME: make a cBorg around this class cCfgSQL: def __init__(self): self.ro_conn = gmPG2.get_connection() #----------------------------------------------- # external API #----------------------------------------------- def get(self, option=None, workplace=None, cookie=None, bias=None, 
default=None, sql_return_type=None): return self.get2 ( option = option, workplace = workplace, cookie = cookie, bias = bias, default = default, sql_return_type = sql_return_type ) #----------------------------------------------- def get2(self, option=None, workplace=None, cookie=None, bias=None, default=None, sql_return_type=None): """Retrieve configuration option from backend. @param bias: Determine the direction into which to look for config options. 'user': When no value is found for "current_user/workplace" look for a value for "current_user" regardless of workspace. The corresponding concept is: "Did *I* set this option anywhere on this site ? If so, reuse the value." 'workplace': When no value is found for "current_user/workplace" look for a value for "workplace" regardless of user. The corresponding concept is: "Did anyone set this option for *this workplace* ? If so, reuse that value." @param default: if no value is found for the option this value is returned instead, also the option is set to this value in the backend, if a missing option will NOT be created in the backend @param sql_return_type: a PostgreSQL type the value of the option is to be cast to before returning, if None no cast will be applied, you will want to make sure that sql_return_type and type(default) are compatible """ if None in [option, workplace]: raise ValueError, 'neither