--- a/cwconfig.py Wed Jun 02 16:13:28 2010 +0200
+++ b/cwconfig.py Wed Jun 02 16:30:36 2010 +0200
@@ -291,7 +291,9 @@
name = None
# log messages format (see logging module documentation for available keys)
log_format = '%(asctime)s - (%(name)s) %(levelname)s: %(message)s'
- # nor remove appobjects based on unused interface
+ # the format below can be useful to debug multi-threading issues:
+ # log_format = '%(asctime)s - [%(threadName)s] (%(name)s) %(levelname)s: %(message)s'
+ # whether to remove appobjects based on unused interfaces [???]
cleanup_interface_sobjects = True
# debug mode
debugmode = False
@@ -689,7 +691,17 @@
else:
logthreshold = self['log-threshold']
self.debugmode = debug
- init_log(debug, syslog, logthreshold, logfile, self.log_format)
+ if sys.platform == 'win32':
+ # no logrotate on win32, so use the logging module's rotation facilities
+ # for now, hard-code weekly rotation every Sunday and keep 52 weeks of backups
+ # idea: make this configurable?
+ init_log(debug, syslog, logthreshold, logfile, self.log_format,
+ rotation_parameters={'when': 'W6', # W6 = every Sunday (W0=Monday .. W6=Sunday)
+ 'interval': 1,
+ 'backupCount': 52,
+ })
+ else:
+ init_log(debug, syslog, logthreshold, logfile, self.log_format)
# configure simpleTal logger
logging.getLogger('simpleTAL').setLevel(logging.ERROR)
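For context on the rotation_parameters introduced above: they match the keyword arguments of the standard library's logging.handlers.TimedRotatingFileHandler, which init_log (from logilab.common) presumably passes through when a logfile is in use. A minimal standalone sketch of the intended behaviour; the setup_win32_rotating_log helper and its arguments are illustrative, not part of this changeset:

    import logging
    from logging.handlers import TimedRotatingFileHandler

    def setup_win32_rotating_log(logfile, log_format):
        # hypothetical helper mirroring the parameters hard-coded above:
        # roll the file over every Sunday ('W6') and keep 52 weekly backups,
        # i.e. roughly one year of logs
        handler = TimedRotatingFileHandler(logfile, when='W6', interval=1,
                                           backupCount=52)
        handler.setFormatter(logging.Formatter(log_format))
        logging.getLogger().addHandler(handler)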
--- a/server/repository.py Wed Jun 02 16:13:28 2010 +0200
+++ b/server/repository.py Wed Jun 02 16:30:36 2010 +0200
@@ -572,7 +572,7 @@
user._cw = user.cw_rset.req = session
user.clear_related_cache()
self._sessions[session.id] = session
- self.info('opened %s', session)
+ self.info('opened session %s for user %s', session.id, login)
self.hm.call_hooks('session_open', session)
# commit session at this point in case write operation has been done
# during `session_open` hooks
--- a/server/sources/__init__.py Wed Jun 02 16:13:28 2010 +0200
+++ b/server/sources/__init__.py Wed Jun 02 16:30:36 2010 +0200
@@ -54,7 +54,9 @@
class TimedCache(dict):
def __init__(self, ttlm, ttls=0):
# time to live in minutes
- self.ttl = timedelta(0, ttlm*60 + ttls, 0)
+ self.ttl = timedelta(seconds=ttlm*60 + ttls)
+ if self.ttl <= timedelta(0):
+ raise ValueError('TimedCache initialized with a ttl of %ss' % (ttlm*60 + ttls))
def __setitem__(self, key, value):
dict.__setitem__(self, key, (datetime.now(), value))
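The guard added to TimedCache turns a non-positive ttl into an immediate ValueError instead of a cache whose entries are already expired when stored. A small usage sketch of the constructor contract (ttlm in minutes, optional ttls in extra seconds), assuming the usual cubicweb package layout:

    from cubicweb.server.sources import TimedCache

    cache = TimedCache(2)          # ttl of 2 minutes, i.e. timedelta(seconds=120)
    cache['spam'] = 'eggs'         # stored together with its insertion timestamp

    try:
        TimedCache(0)              # a zero ttl is now rejected up front
    except ValueError:
        pass                       # instead of silently expiring every entry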
--- a/server/sources/ldapuser.py Wed Jun 02 16:13:28 2010 +0200
+++ b/server/sources/ldapuser.py Wed Jun 02 16:30:36 2010 +0200
@@ -33,7 +33,7 @@
WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
FOR A PARTICULAR PURPOSE.
"""
-
+from __future__ import division
from base64 import b64decode
from logilab.common.textutils import splitstrip
@@ -158,7 +158,7 @@
('cache-life-time',
{'type' : 'time',
'default': '2h',
- 'help': 'life time of query cache in minutes (default to two hours).',
+ 'help': 'lifetime of the query cache (defaults to two hours).',
'group': 'ldap-source', 'level': 3,
}),
@@ -187,9 +187,11 @@
for o in self.user_classes]
self._conn = None
self._cache = {}
+ # cache-life-time is validated into seconds, but TimedCache expects minutes
ttlm = time_validator(None, None,
- source_config.get('cache-life-time', 2*60))
- self._query_cache = TimedCache(ttlm)
+ source_config.get('cache-life-time', 2*60*60)) // 60
+ self._query_cache = TimedCache(max(ttlm, 1))
+ # synchronization-interval is in seconds
self._interval = time_validator(None, None,
source_config.get('synchronization-interval',
24*60*60))
@@ -197,13 +199,15 @@
def reset_caches(self):
"""method called during test to reset potential source caches"""
self._cache = {}
- self._query_cache = TimedCache(2*60)
+ self._query_cache = TimedCache(2*60) # 2*60 minutes = the 2 hours default (TimedCache ttl is in minutes)
def init(self):
"""method called by the repository once ready to handle request"""
self.info('ldap init')
- self.repo.looping_task(self._interval, self.synchronize)
- self.repo.looping_task(self._query_cache.ttl.seconds/10,
+ # set a minimum period of 5min 1s (the additional second minimizes
+ # resonance effects)
+ self.repo.looping_task(max(301, self._interval), self.synchronize)
+ self.repo.looping_task(max(7, self._query_cache.ttl.seconds // 10),
self._query_cache.clear_expired)
def synchronize(self):
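To keep the unit conventions in this source straight: the 'time'-typed options are validated into seconds, TimedCache wants minutes, and repo.looping_task wants seconds, with floors so neither periodic task can fire pathologically often. A small sketch of that arithmetic using the defaults from this changeset (the ldap_source_periods helper is illustrative only):

    def ldap_source_periods(cache_life_time=2*60*60, sync_interval=24*60*60):
        # both inputs are seconds, as produced by the 'time' option validator
        ttl_minutes = max(cache_life_time // 60, 1)        # TimedCache wants minutes
        sync_period = max(301, sync_interval)              # synchronize(): >= 5min 1s
        expiry_period = max(7, (ttl_minutes * 60) // 10)   # clear_expired(): >= 7s
        return ttl_minutes, sync_period, expiry_period

    # with the defaults this yields (120, 86400, 720): a 2h cache ttl, daily
    # synchronization, and expired entries swept every 12 minutes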
--- a/server/sources/native.py Wed Jun 02 16:13:28 2010 +0200
+++ b/server/sources/native.py Wed Jun 02 16:30:36 2010 +0200
@@ -256,6 +256,7 @@
# we need a lock to protect eid attribution function (XXX, really?
# explain)
self._eid_creation_lock = Lock()
+ self._eid_creation_cnx = self.get_connection() # dedicated connection for eid creation
# (etype, attr) / storage mapping
self._storages = {}
# entity types that may be used by other multi-sources instances
@@ -730,13 +731,48 @@
self.doexec(session, sql)
def create_eid(self, session):
- self._eid_creation_lock.acquire()
+ self.debug('create eid')
+ # lock needed to prevent 'Connection is busy with results for another command (0)' errors with SQLServer
+ self._eid_creation_lock.acquire()
+ try:
+ return self.__create_eid()
+ finally:
+ self._eid_creation_lock.release()
+
+ def __create_eid(self):
+ # internal method doing the eid creation without locking; needed for the
+ # recursive handling of disconnections (otherwise we would deadlock on
+ # self._eid_creation_lock)
+ if self._eid_creation_cnx is None:
+ self._eid_creation_cnx = self.get_connection()
+ cnx = self._eid_creation_cnx
+ cursor = cnx.cursor()
try:
for sql in self.dbhelper.sqls_increment_sequence('entities_id_seq'):
- cursor = self.doexec(session, sql)
- return cursor.fetchone()[0]
- finally:
- self._eid_creation_lock.release()
+ cursor.execute(sql)
+ eid = cursor.fetchone()[0]
+ except (self.OperationalError, self.InterfaceError):
+ # FIXME: better detection of disconnection problems
+ self.warning("trying to reconnect eid creation connection")
+ self._eid_creation_cnx = None
+ return self.__create_eid()
+ except (self.DbapiError,), exc:
+ # we get this one with pyodbc and SQL Server when the connection was reset
+ if exc.args[0] == '08S01': # SQLSTATE 08S01: communication link failure
+ self.warning("trying to reconnect eid creation connection")
+ self._eid_creation_cnx = None
+ return self.__create_eid()
+ else:
+ raise
+ except: # anything else: rollback, drop the connection and re-raise
+ cnx.rollback()
+ self._eid_creation_cnx = None
+ self.exception('create eid failed in an unforeseen way on SQL statement %s', sql)
+ raise
+ else:
+ cnx.commit()
+ return eid
+
def add_info(self, session, entity, source, extid, complete):
"""add type and source info for an eid into the system table"""