delete-trailing-whitespaces tls-sprint
author sylvain.thenault@logilab.fr
date Fri, 24 Apr 2009 19:46:21 +0200
branch tls-sprint
changeset 1482 93c613913912
parent 1481 8ea54e7be3e2
child 1483 1367efbcd6a5
delete-trailing-whitespaces
files
devtools/fake.py
server/repository.py
server/sources/extlite.py
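
This changeset is purely mechanical: every hunk below strips trailing spaces from blank or code lines, and one over-long call in server/sources/extlite.py is rewrapped. For reference, a cleanup like this can be reproduced with a short script; the sketch below is illustrative only (the script name and its invocation are assumptions, not part of the changeset), written for the Python 2 interpreter the project uses but compatible with Python 3.

# strip_trailing_ws.py -- illustrative sketch, not part of this changeset
import sys

def strip_trailing_whitespace(path):
    """Rewrite the file at `path` with trailing spaces and tabs removed from every line."""
    lines = open(path).readlines()
    # rstrip() drops the newline as well, so re-append it; this also
    # normalizes a missing newline on the last line of the file
    cleaned = [line.rstrip() + '\n' for line in lines]
    if cleaned != lines:
        open(path, 'w').writelines(cleaned)

if __name__ == '__main__':
    for path in sys.argv[1:]:
        strip_trailing_whitespace(path)

Hypothetical invocation over the files touched here: python strip_trailing_ws.py devtools/fake.py server/repository.py server/sources/extlite.py
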
--- a/devtools/fake.py	Fri Apr 24 19:46:14 2009 +0200
+++ b/devtools/fake.py	Fri Apr 24 19:46:21 2009 +0200
@@ -24,13 +24,13 @@
         self.apphome = apphome
         self._cubes = cubes
         self['auth-mode'] = 'cookie'
-        self['uid'] = None 
+        self['uid'] = None
         self['base-url'] = BASE_URL
         self['rql-cache-size'] = 100
-       
+
     def cubes(self, expand=False):
         return self._cubes
-    
+
     def sources(self):
         return {}
 
@@ -41,7 +41,7 @@
         self.properties = {'ui.encoding': 'UTF8',
                            'ui.language': 'en',
                            }
-        
+
     def property_value(self, key):
         return self.properties[key]
 
@@ -51,10 +51,10 @@
         'views' : [Mock(id='primary'), Mock(id='secondary'),
                          Mock(id='oneline'), Mock(id='list')],
         }
-    
+
     def registry_objects(self, name, oid=None):
         return self._registries[name]
-    
+
     def etype_class(self, etype):
         class Entity(dict):
             e_schema = self.schema[etype]
@@ -112,15 +112,15 @@
     def set_header(self, header, value):
         """set an output HTTP header"""
         pass
-    
+
     def add_header(self, header, value):
         """set an output HTTP header"""
         pass
-    
+
     def remove_header(self, header):
         """remove an output HTTP header"""
         pass
-    
+
     def get_header(self, header, default=None):
         """return the value associated with the given input header,
         raise KeyError if the header is not set
@@ -169,7 +169,7 @@
         self.is_internal_session = False
         self.is_super_session = self.user.eid == -1
         self._query_data = {}
-        
+
     def execute(self, *args):
         pass
     def commit(self, *args):
@@ -186,7 +186,7 @@
 
     def set_entity_cache(self, entity):
         pass
-    
+
 class FakeRepo(object):
     querier = None
     def __init__(self, schema, vreg=None, config=None):
@@ -214,7 +214,7 @@
             self.eids[eid] = extid
             source.after_entity_insertion(session, extid, entity)
             return eid
-        
+
     def eid2extid(self, source, eid, session=None):
         return self.eids[eid]
 
@@ -229,7 +229,7 @@
     def __init__(self, uri):
         self.uri = uri
 
-        
+
 class FakePool(object):
     def source(self, uri):
         return FakeSource(uri)
--- a/server/repository.py	Fri Apr 24 19:46:14 2009 +0200
+++ b/server/repository.py	Fri Apr 24 19:46:21 2009 +0200
@@ -60,7 +60,7 @@
         remove inserted eid from repository type/source cache
         """
         self.repo.clear_caches(self.session.query_data('pendingeids', ()))
-        
+
     def rollback_event(self):
         """the observed connections pool has been rollbacked,
         remove inserted eid from repository type/source cache
@@ -84,7 +84,7 @@
         session.repo.system_source.fti_unindex_entity(session, entity.eid)
         for container in entity.fti_containers():
             session.repo.index_entity(session, container)
-            
+
     def commit_event(self):
         pass
 
@@ -120,14 +120,14 @@
             'DELETE X %s Y WHERE NOT X eid %%(x)s, Y eid %%(y)s' % rtype,
             {'x': eidfrom, 'y': eidto}, 'y')
 
-    
+
 class Repository(object):
     """a repository provides access to a set of persistent storages for
     entities and relations
 
     XXX protect pyro access
     """
-    
+
     def __init__(self, config, vreg=None, debug=False):
         self.config = config
         if vreg is None:
@@ -155,7 +155,7 @@
         for uri, source_config in config.sources().items():
             if uri == 'admin':
                 # not an actual source
-                continue 
+                continue
             source = self.get_source(uri, source_config)
             self.sources_by_uri[uri] = source
             self.sources.append(source)
@@ -214,16 +214,16 @@
                 source.init_creating()
         # close initialization pool and reopen fresh ones for proper
         # initialization now that we know cubes
-        self._get_pool().close(True) 
+        self._get_pool().close(True)
         for i in xrange(config['connections-pool-size']):
             self._available_pools.put_nowait(ConnectionsPool(self.sources))
-        
+
     # internals ###############################################################
 
     def get_source(self, uri, source_config):
         source_config['uri'] = uri
         return get_source(source_config, self.schema, self)
-        
+
     def set_schema(self, schema, resetvreg=True):
         schema.rebuild_infered_relations()
         self.info('set schema %s %#x', schema.name, id(schema))
@@ -259,7 +259,7 @@
             except Exception, ex:
                 import traceback
                 traceback.print_exc()
-                raise Exception('Is the database initialised ? (cause: %s)' % 
+                raise Exception('Is the database initialised ? (cause: %s)' %
                                 (ex.args and ex.args[0].strip() or 'unknown')), \
                                 None, sys.exc_info()[-1]
             self.info('set the actual schema')
@@ -277,7 +277,7 @@
             session.close()
         self.config.init_cubes(self.get_cubes())
         self.set_schema(appschema)
-        
+
     def set_bootstrap_schema(self, schema):
         """disable hooks when setting a bootstrap schema, but restore
         the configuration for the next time
@@ -295,7 +295,7 @@
         config.schema_hooks = True
         config.notification_hooks = True
         config.application_hooks = True
-            
+
     def start_looping_tasks(self):
         assert isinstance(self._looping_tasks, list), 'already started'
         for i, (interval, func) in enumerate(self._looping_tasks):
@@ -308,7 +308,7 @@
 
     def looping_task(self, interval, func):
         """register a function to be called every `interval` seconds.
-        
+
         looping tasks can only be registered during repository initialization,
         once done this method will fail.
         """
@@ -321,7 +321,7 @@
         """start function in a separated thread"""
         t = RepoThread(func, self._running_threads)
         t.start()
-        
+
     #@locked
     def _get_pool(self):
         try:
@@ -332,7 +332,7 @@
                             'connections) or to much load on the server (in '
                             'which case you can try to set a bigger '
                             'connections pools size)')
-        
+
     def _free_pool(self, pool):
         pool.rollback()
         self._available_pools.put_nowait(pool)
@@ -382,7 +382,7 @@
                       ((hits + misses) * 100) / (hits + misses + nocache))
         except ZeroDivisionError:
             pass
-        
+
     def authenticate_user(self, session, login, password):
         """validate login / password, raise AuthenticationError on failure
         return associated CWUser instance on success
@@ -415,9 +415,9 @@
         euser.groups
         euser.properties
         return euser
-        
+
     # public (dbapi) interface ################################################
-            
+
     def get_schema(self):
         """return the application schema. This is a public method, not
         requiring a session id
@@ -469,7 +469,7 @@
         finally:
             session.close()
         return vcconf
-    
+
     @cached
     def source_defs(self):
         sources = self.config.sources().copy()
@@ -526,13 +526,13 @@
         finally:
             session.close()
         return True
-        
+
     def connect(self, login, password, cnxprops=None):
         """open a connection for a given user
 
         base_url may be needed to send mails
         cnxtype indicate if this is a pyro connection or a in-memory connection
-        
+
         raise `AuthenticationError` if the authentication failed
         raise `ConnectionError` if we can't open a connection
         """
@@ -584,7 +584,7 @@
                 raise
         finally:
             session.reset_pool()
-    
+
     def describe(self, sessionid, eid):
         """return a tuple (type, source, extid) for the entity with id <eid>"""
         session = self._get_session(sessionid, setpool=True)
@@ -618,12 +618,12 @@
         self.debug('begin commit for session %s', sessionid)
         try:
             self._get_session(sessionid, setpool=True).commit()
-        except (ValidationError, Unauthorized): 
+        except (ValidationError, Unauthorized):
             raise
         except:
             self.exception('unexpected error')
             raise
-        
+
     def rollback(self, sessionid):
         """commit transaction for the session with the given id"""
         self.debug('begin rollback for session %s', sessionid)
@@ -645,7 +645,7 @@
         session.close()
         del self._sessions[sessionid]
         self.info('closed session %s for user %s', sessionid, session.user.login)
-    
+
     def user_info(self, sessionid, props=None):
         """this method should be used by client to:
         * check session id validity
@@ -659,9 +659,9 @@
                 session.change_property(prop, value)
         user = session.user
         return user.eid, user.login, user.groups, user.properties
-            
+
     # public (inter-repository) interface #####################################
-    
+
     def entities_modified_since(self, etypes, mtime):
         """function designed to be called from an external repository which
         is using this one as a rql source for synchronization, and return a
@@ -683,7 +683,7 @@
             session.close()
 
     # session handling ########################################################
-        
+
     def close_sessions(self):
         """close every opened sessions"""
         for sessionid in self._sessions.keys():
@@ -705,7 +705,7 @@
                 self.close(session.id)
                 nbclosed += 1
         return nbclosed
-    
+
     def internal_session(self, cnxprops=None):
         """return a dbapi like connection/cursor using internal user which
         have every rights on the repository. You'll *have to* commit/rollback
@@ -716,7 +716,7 @@
         session = InternalSession(self, cnxprops)
         session.set_pool()
         return session
-            
+
     def _get_session(self, sessionid, setpool=False):
         """return the user associated to the given session identifier"""
         try:
@@ -731,7 +731,7 @@
     # * correspondance between eid and (type, source)
     # * correspondance between eid and local id (i.e. specific to a given source)
     # * searchable text indexes
-    
+
     def type_and_source_from_eid(self, eid, session=None):
         """return a tuple (type, source, extid) for the entity with id <eid>"""
         try:
@@ -771,15 +771,15 @@
             rqlcache.pop('Any X WHERE X eid %s' % eid, None)
             for source in self.sources:
                 source.clear_eid_cache(eid, etype)
-                
+
     def type_from_eid(self, eid, session=None):
         """return the type of the entity with id <eid>"""
         return self.type_and_source_from_eid(eid, session)[0]
-    
+
     def source_from_eid(self, eid, session=None):
         """return the source for the given entity's eid"""
         return self.sources_by_uri[self.type_and_source_from_eid(eid, session)[1]]
-        
+
     def eid2extid(self, source, eid, session=None):
         """get local id from an eid"""
         etype, uri, extid = self.type_and_source_from_eid(eid, session)
@@ -848,7 +848,7 @@
         except:
             session.rollback(reset_pool)
             raise
-        
+
     def add_info(self, session, entity, source, extid=None, complete=True):
         """add type and source info for an eid into the system table,
         and index the entity with the full text index
@@ -862,11 +862,11 @@
         if self.do_fti:
             FTIndexEntityOp(session, entity=entity)
         CleanupEidTypeCacheOp(session)
-        
+
     def delete_info(self, session, eid):
         self._prepare_delete_info(session, eid)
         self._delete_info(session, eid)
-        
+
     def _prepare_delete_info(self, session, eid):
         """prepare the repository for deletion of an entity:
         * update the fti
@@ -877,7 +877,7 @@
         pending = session.query_data('pendingeids', set(), setdefault=True)
         pending.add(eid)
         CleanupEidTypeCacheOp(session)
-        
+
     def _delete_info(self, session, eid):
         """delete system information on deletion of an entity:
         * delete all relations on this entity
@@ -886,7 +886,7 @@
         etype, uri, extid = self.type_and_source_from_eid(eid, session)
         self._clear_eid_relations(session, etype, eid)
         self.system_source.delete_info(session, eid, etype, uri, extid)
-        
+
     def _clear_eid_relations(self, session, etype, eid):
         """when a entity is deleted, build and execute rql query to delete all
         its relations
@@ -917,7 +917,7 @@
             return
         alreadydone.add(entity.eid)
         self.system_source.fti_index_entity(session, entity)
-        
+
     def locate_relation_source(self, session, subject, rtype, object):
         subjsource = self.source_from_eid(subject, session)
         objsource = self.source_from_eid(object, session)
@@ -928,17 +928,17 @@
         else:
             source = subjsource
         return source
-    
+
     def locate_etype_source(self, etype):
         for source in self.sources:
             if source.support_entity(etype, 1):
                 return source
         else:
             raise ETypeNotSupportedBySources(etype)
-        
+
     def glob_add_entity(self, session, entity):
         """add an entity to the repository
-        
+
         the entity eid should originaly be None and a unique eid is assigned to
         the entity instance
         """
@@ -981,7 +981,7 @@
                 self.hm.call_hooks('after_add_relation', attr, session,
                                     entity.eid, attr, value)
         return entity.eid
-        
+
     def glob_update_entity(self, session, entity):
         """replace an entity in the repository
         the type and the eid of an entity must not be changed
@@ -1051,7 +1051,7 @@
         if source.should_call_hooks:
             self.hm.call_hooks('after_delete_entity', etype, session, eid)
         # don't clear cache here this is done in a hook on commit
-        
+
     def glob_add_relation(self, session, subject, rtype, object):
         """add a relation to the repository"""
         assert subject is not None
@@ -1089,7 +1089,7 @@
 
 
     # pyro handling ###########################################################
-    
+
     def pyro_register(self, host=''):
         """register the repository as a pyro object"""
         from Pyro import core
@@ -1108,7 +1108,7 @@
         self.info(msg, nsgroup, nsid)
         self.pyro_registered = True
         return daemon
-    
+
     def pyro_nameserver(self, host=None, group=None):
         """locate and bind the the name server to the daemon"""
         from Pyro import naming, errors
@@ -1123,25 +1123,25 @@
         return nameserver
 
     # multi-sources planner helpers ###########################################
-    
+
     @cached
     def rel_type_sources(self, rtype):
         return [source for source in self.sources
                 if source.support_relation(rtype)
                 or rtype in source.dont_cross_relations]
-    
+
     @cached
     def can_cross_relation(self, rtype):
         return [source for source in self.sources
                 if source.support_relation(rtype)
                 and rtype in source.cross_relations]
-    
+
     @cached
     def is_multi_sources_relation(self, rtype):
         return any(source for source in self.sources
                    if not source is self.system_source
                    and source.support_relation(rtype))
-    
+
 
 def pyro_unregister(config):
     """unregister the repository from the pyro name server"""
--- a/server/sources/extlite.py	Fri Apr 24 19:46:14 2009 +0200
+++ b/server/sources/extlite.py	Fri Apr 24 19:46:21 2009 +0200
@@ -22,7 +22,7 @@
         timeout -= 0.2
         if timeout <= 0:
             raise RuntimeError("svn source is busy, can't acquire connection lock")
-        
+
 class ConnectionWrapper(object):
     def __init__(self, source=None):
         self.source = source
@@ -34,19 +34,19 @@
             timeout_acquire(self.source._cnxlock, 5)
             self._cnx = self.source._sqlcnx
         return self._cnx
-    
+
     def commit(self):
         if self._cnx is not None:
             self._cnx.commit()
-        
+
     def rollback(self):
         if self._cnx is not None:
             self._cnx.rollback()
-        
+
     def cursor(self):
         return self.cnx.cursor()
 
-    
+
 class SQLiteAbstractSource(AbstractSource):
     """an abstract class for external sources using a sqlite database helper
     """
@@ -59,7 +59,7 @@
             native.NONSYSTEM_ETYPES.add(etype)
         for rtype in cls.support_relations:
             native.NONSYSTEM_RELATIONS.add(rtype)
-        
+
     options = (
         ('helper-db-path',
          {'type' : 'string',
@@ -69,10 +69,10 @@
           'inputlevel': 2,
           }),
     )
-            
+
     def __init__(self, repo, appschema, source_config, *args, **kwargs):
         # the helper db is used to easy querying and will store everything but
-        # actual file content 
+        # actual file content
         dbpath = source_config.get('helper-db-path')
         if dbpath is None:
             dbpath = join(repo.config.appdatahome,
@@ -91,7 +91,7 @@
         # * create the connection when needed
         # * use a lock to be sure only one connection is used
         self._cnxlock = threading.Lock()
-        
+
     @property
     def _sqlcnx(self):
         # XXX: sqlite connections can only be used in the same thread, so
@@ -138,13 +138,13 @@
                          self.repo.config['uid'])
             chown(self.dbpath, self.repo.config['uid'])
         restrict_perms_to_user(self.dbpath, self.info)
-        
+
     def set_schema(self, schema):
         super(SQLiteAbstractSource, self).set_schema(schema)
         if self._need_sql_create and self._is_schema_complete() and self.dbpath:
             self._create_database()
         self.rqlsqlgen = self.sqlgen_class(schema, self.sqladapter.dbhelper)
-                
+
     def get_connection(self):
         return ConnectionWrapper(self)
 
@@ -168,11 +168,11 @@
                 cnx._cnx = None
             finally:
                 self._cnxlock.release()
-        
+
     def syntax_tree_search(self, session, union,
                            args=None, cachekey=None, varmap=None, debug=0):
-        """return result from this source for a rql query (actually from a rql 
-        syntax tree and a solution dictionary mapping each used variable to a 
+        """return result from this source for a rql query (actually from a rql
+        syntax tree and a solution dictionary mapping each used variable to a
         possible type). If cachekey is given, the query necessary to fetch the
         results (but not the results themselves) may be cached using this key.
         """
@@ -185,7 +185,7 @@
         args = self.sqladapter.merge_args(args, query_args)
         cursor = session.pool[self.uri]
         cursor.execute(sql, args)
-        return self.sqladapter.process_result(cursor) 
+        return self.sqladapter.process_result(cursor)
 
     def local_add_entity(self, session, entity):
         """insert the entity in the local database.
@@ -198,7 +198,7 @@
         attrs = self.sqladapter.preprocess_entity(entity)
         sql = self.sqladapter.sqlgen.insert(SQL_PREFIX + str(entity.e_schema), attrs)
         cu.execute(sql, attrs)
-        
+
     def add_entity(self, session, entity):
         """add a new entity to the source"""
         raise NotImplementedError()
@@ -213,14 +213,14 @@
         cu = session.pool[self.uri]
         if attrs is None:
             attrs = self.sqladapter.preprocess_entity(entity)
-        sql = self.sqladapter.sqlgen.update(SQL_PREFIX + str(entity.e_schema), attrs,
-                                            [SQL_PREFIX + 'eid'])
+        sql = self.sqladapter.sqlgen.update(SQL_PREFIX + str(entity.e_schema),
+                                            attrs, [SQL_PREFIX + 'eid'])
         cu.execute(sql, attrs)
-        
+
     def update_entity(self, session, entity):
         """update an entity in the source"""
         raise NotImplementedError()
-        
+
     def delete_entity(self, session, etype, eid):
         """delete an entity from the source
 
@@ -228,11 +228,11 @@
         source. Main usage is to delete repository content when a Repository
         entity is deleted.
         """
-        sqlcursor = session.pool[self.uri]        
+        sqlcursor = session.pool[self.uri]
         attrs = {SQL_PREFIX + 'eid': eid}
         sql = self.sqladapter.sqlgen.delete(SQL_PREFIX + etype, attrs)
         sqlcursor.execute(sql, attrs)
-    
+
     def delete_relation(self, session, subject, rtype, object):
         """delete a relation from the source"""
         rschema = self.schema.rschema(rtype)
@@ -246,5 +246,5 @@
         else:
             attrs = {'eid_from': subject, 'eid_to': object}
             sql = self.sqladapter.sqlgen.delete('%s_relation' % rtype, attrs)
-        sqlcursor = session.pool[self.uri]        
+        sqlcursor = session.pool[self.uri]
         sqlcursor.execute(sql, attrs)