# HG changeset patch # User Sylvain Thénault # Date 1263978825 -3600 # Node ID 74c1597f8a8234eb84b08d6ab2f9cf7bdac8975e # Parent e6b9fd79a247e77f7e527049449e17c0441fe3f8# Parent 15d541321a8cd39b77987752fc5a82cc47d7cce9 merge diff -r 15d541321a8c -r 74c1597f8a82 MANIFEST.in --- a/MANIFEST.in Wed Jan 20 10:13:02 2010 +0100 +++ b/MANIFEST.in Wed Jan 20 10:13:45 2010 +0100 @@ -17,7 +17,6 @@ recursive-include i18n *.pot *.po recursive-include schemas *.py *.sql.* -recursive-include common/test/data * recursive-include entities/test/data * recursive-include sobjects/test/data * recursive-include server/test/data * diff -r 15d541321a8c -r 74c1597f8a82 __init__.py --- a/__init__.py Wed Jan 20 10:13:02 2010 +0100 +++ b/__init__.py Wed Jan 20 10:13:45 2010 +0100 @@ -7,7 +7,6 @@ :license: Library General Public License version 2 - http://www.gnu.org/licenses """ __docformat__ = "restructuredtext en" -from cubicweb.__pkginfo__ import version as __version__ import __builtin__ # '_' is available in builtins to mark internationalized string but should @@ -19,9 +18,7 @@ import sys, os, logging from StringIO import StringIO -from urllib import quote as urlquote, unquote as urlunquote -from logilab.common.decorators import cached from logilab.common.logging_ext import set_log_methods @@ -30,6 +27,8 @@ else: logging.basicConfig() +from cubicweb.__pkginfo__ import version as __version__ + set_log_methods(sys.modules[__name__], logging.getLogger('cubicweb')) @@ -57,222 +56,7 @@ "Binary objects must use raw strings, not %s" % data.__class__ StringIO.write(self, data) - -class RequestSessionMixIn(object): - """mixin class containing stuff shared by server session and web request - """ - def __init__(self, vreg): - self.vreg = vreg - try: - encoding = vreg.property_value('ui.encoding') - except: # no vreg or property not registered - encoding = 'utf-8' - self.encoding = encoding - # cache result of execution for (rql expr / eids), - # should be emptied on commit/rollback of the server session / web - # connection - self.local_perm_cache = {} - - def property_value(self, key): - if self.user: - return self.user.property_value(key) - return self.vreg.property_value(key) - - def etype_rset(self, etype, size=1): - """return a fake result set for a particular entity type""" - from cubicweb.rset import ResultSet - rset = ResultSet([('A',)]*size, '%s X' % etype, - description=[(etype,)]*size) - def get_entity(row, col=0, etype=etype, req=self, rset=rset): - return req.vreg.etype_class(etype)(req, rset, row, col) - rset.get_entity = get_entity - return self.decorate_rset(rset) - - def eid_rset(self, eid, etype=None): - """return a result set for the given eid without doing actual query - (we have the eid, we can suppose it exists and user has access to the - entity) - """ - from cubicweb.rset import ResultSet - eid = typed_eid(eid) - if etype is None: - etype = self.describe(eid)[0] - rset = ResultSet([(eid,)], 'Any X WHERE X eid %(x)s', {'x': eid}, - [(etype,)]) - return self.decorate_rset(rset) - - def empty_rset(self): - """return an empty result set. This is used e.g. to substitute - to a real result set if the user doesn't have permission to - access the results of a query. 
- """ - from cubicweb.rset import ResultSet - return self.decorate_rset(ResultSet([], 'Any X WHERE X eid -1')) - - def entity_from_eid(self, eid, etype=None): - try: - return self.entity_cache(eid) - except KeyError: - rset = self.eid_rset(eid, etype) - entity = rset.get_entity(0, 0) - self.set_entity_cache(entity) - return entity - - def entity_cache(self, eid): - raise KeyError - def set_entity_cache(self, entity): - pass - - def create_entity(self, etype, _cw_unsafe=False, **kwargs): - """add a new entity of the given type - - Example (in a shell session): - - c = create_entity('Company', name=u'Logilab') - create_entity('Person', works_for=c, firstname=u'John', lastname=u'Doe') - - """ - if _cw_unsafe: - execute = self.unsafe_execute - else: - execute = self.execute - rql = 'INSERT %s X' % etype - relations = [] - restrictions = set() - cachekey = [] - pending_relations = [] - for attr, value in kwargs.items(): - if isinstance(value, (tuple, list, set, frozenset)): - if len(value) == 1: - value = iter(value).next() - else: - del kwargs[attr] - pending_relations.append( (attr, value) ) - continue - if hasattr(value, 'eid'): # non final relation - rvar = attr.upper() - # XXX safer detection of object relation - if attr.startswith('reverse_'): - relations.append('%s %s X' % (rvar, attr[len('reverse_'):])) - else: - relations.append('X %s %s' % (attr, rvar)) - restriction = '%s eid %%(%s)s' % (rvar, attr) - if not restriction in restrictions: - restrictions.add(restriction) - cachekey.append(attr) - kwargs[attr] = value.eid - else: # attribute - relations.append('X %s %%(%s)s' % (attr, attr)) - if relations: - rql = '%s: %s' % (rql, ', '.join(relations)) - if restrictions: - rql = '%s WHERE %s' % (rql, ', '.join(restrictions)) - created = execute(rql, kwargs, cachekey).get_entity(0, 0) - for attr, values in pending_relations: - if attr.startswith('reverse_'): - restr = 'Y %s X' % attr[len('reverse_'):] - else: - restr = 'X %s Y' % attr - execute('SET %s WHERE X eid %%(x)s, Y eid IN (%s)' % ( - restr, ','.join(str(r.eid) for r in values)), - {'x': created.eid}, 'x') - return created - - # url generation methods ################################################## - - def build_url(self, *args, **kwargs): - """return an absolute URL using params dictionary key/values as URL - parameters. Values are automatically URL quoted, and the - publishing method to use may be specified or will be guessed. - """ - # use *args since we don't want first argument to be "anonymous" to - # avoid potential clash with kwargs - assert len(args) == 1, 'only 0 or 1 non-named-argument expected' - method = args[0] - base_url = kwargs.pop('base_url', None) - if base_url is None: - base_url = self.base_url() - if '_restpath' in kwargs: - assert method == 'view', method - path = kwargs.pop('_restpath') - else: - path = method - if not kwargs: - return u'%s%s' % (base_url, path) - return u'%s%s?%s' % (base_url, path, self.build_url_params(**kwargs)) - - - def build_url_params(self, **kwargs): - """return encoded params to incorporate them in an URL""" - args = [] - for param, values in kwargs.items(): - if not isinstance(values, (list, tuple)): - values = (values,) - for value in values: - args.append(u'%s=%s' % (param, self.url_quote(value))) - return '&'.join(args) - - def url_quote(self, value, safe=''): - """urllib.quote is not unicode safe, use this method to do the - necessary encoding / decoding. Also it's designed to quote each - part of a url path and so the '/' character will be encoded as well. 
- """ - if isinstance(value, unicode): - quoted = urlquote(value.encode(self.encoding), safe=safe) - return unicode(quoted, self.encoding) - return urlquote(str(value), safe=safe) - - def url_unquote(self, quoted): - """returns a unicode unquoted string - - decoding is based on `self.encoding` which is the encoding - used in `url_quote` - """ - if isinstance(quoted, unicode): - quoted = quoted.encode(self.encoding) - try: - return unicode(urlunquote(quoted), self.encoding) - except UnicodeDecodeError: # might occurs on manually typed URLs - return unicode(urlunquote(quoted), 'iso-8859-1') - - - # session's user related methods ##################################### - - @cached - def user_data(self): - """returns a dictionnary with this user's information""" - userinfo = {} - if self.is_internal_session: - userinfo['login'] = "cubicweb" - userinfo['name'] = "cubicweb" - userinfo['email'] = "" - return userinfo - user = self.actual_session().user - userinfo['login'] = user.login - userinfo['name'] = user.name() - userinfo['email'] = user.get_email() - return userinfo - - def is_internal_session(self): - """overrided on the server-side""" - return False - - # abstract methods to override according to the web front-end ############# - - def base_url(self): - """return the root url of the instance""" - raise NotImplementedError - - def decorate_rset(self, rset): - """add vreg/req (at least) attributes to the given result set """ - raise NotImplementedError - - def describe(self, eid): - """return a tuple (type, sourceuri, extid) for the entity with id """ - raise NotImplementedError - - -# XXX 2.45 is allowing nicer entity type names, use this map for bw compat +# use this dictionary for renaming of entity types while keeping bw compat ETYPE_NAME_MAP = {# 3.2 migration 'ECache': 'CWCache', 'EUser': 'CWUser', @@ -285,31 +69,18 @@ 'EConstraintType': 'CWConstraintType', 'EConstraint': 'CWConstraint', 'EPermission': 'CWPermission', - # 2.45 migration - 'Eetype': 'CWEType', - 'Ertype': 'CWRType', - 'Efrdef': 'CWAttribute', - 'Enfrdef': 'CWRelation', - 'Econstraint': 'CWConstraint', - 'Econstrainttype': 'CWConstraintType', - 'Epermission': 'CWPermission', - 'Egroup': 'CWGroup', - 'Euser': 'CWUser', - 'Eproperty': 'CWProperty', - 'Emailaddress': 'EmailAddress', - 'Rqlexpression': 'RQLExpression', - 'Trinfo': 'TrInfo', } - # XXX cubic web cube migration map CW_MIGRATION_MAP = {'erudi': 'cubicweb', - 'eaddressbook': 'addressbook', 'ebasket': 'basket', 'eblog': 'blog', 'ebook': 'book', + 'eclassschemes': 'keyword', + 'eclassfolders': 'folder', + 'eclasstags': 'tag', 'ecomment': 'comment', 'ecompany': 'company', 'econference': 'conference', @@ -329,20 +100,6 @@ 'ezone': 'zone', 'i18ncontent': 'i18ncontent', 'svnfile': 'vcsfile', - - 'eclassschemes': 'keyword', - 'eclassfolders': 'folder', - 'eclasstags': 'tag', - - 'jpl': 'jpl', - 'jplintra': 'jplintra', - 'jplextra': 'jplextra', - 'jplorg': 'jplorg', - 'jplrecia': 'jplrecia', - 'crm': 'crm', - 'agueol': 'agueol', - 'docaster': 'docaster', - 'asteretud': 'asteretud', } def neg_role(role): @@ -362,9 +119,6 @@ except AttributeError: return neg_role(obj.role) -def underline_title(title, car='-'): - return title+'\n'+(car*len(title)) - class CubicWebEventManager(object): """simple event / callback manager. 
diff -r 15d541321a8c -r 74c1597f8a82 __pkginfo__.py --- a/__pkginfo__.py Wed Jan 20 10:13:02 2010 +0100 +++ b/__pkginfo__.py Wed Jan 20 10:13:45 2010 +0100 @@ -7,7 +7,7 @@ distname = "cubicweb" modname = "cubicweb" -numversion = (3, 5, 11) +numversion = (3, 6, 0) version = '.'.join(str(num) for num in numversion) license = 'LGPL' @@ -48,7 +48,6 @@ scripts = [s for s in glob.glob(join('bin', 'cubicweb-*')) if not s.endswith('.bat')] include_dirs = [join('test', 'data'), - join('common', 'test', 'data'), join('server', 'test', 'data'), join('web', 'test', 'data'), join('devtools', 'test', 'data'), diff -r 15d541321a8c -r 74c1597f8a82 _exceptions.py --- a/_exceptions.py Wed Jan 20 10:13:02 2010 +0100 +++ b/_exceptions.py Wed Jan 20 10:13:45 2010 +0100 @@ -20,7 +20,7 @@ if self.args: return self.msg % tuple(self.args) return self.msg - return ' '.join(str(arg) for arg in self.args) + return ' '.join(unicode(arg) for arg in self.args) class ConfigurationError(CubicWebException): diff -r 15d541321a8c -r 74c1597f8a82 appobject.py --- a/appobject.py Wed Jan 20 10:13:02 2010 +0100 +++ b/appobject.py Wed Jan 20 10:13:45 2010 +0100 @@ -11,28 +11,13 @@ import types from logging import getLogger -from datetime import datetime, timedelta, time +from warnings import warn from logilab.common.decorators import classproperty from logilab.common.deprecation import deprecated from logilab.common.logging_ext import set_log_methods -from rql.nodes import VariableRef, SubQuery -from rql.stmts import Union, Select - from cubicweb import Unauthorized, NoSelectableObject -from cubicweb.utils import UStringIO, ustrftime, strptime, todate, todatetime - -ONESECOND = timedelta(0, 1, 0) -CACHE_REGISTRY = {} - - -class Cache(dict): - def __init__(self): - super(Cache, self).__init__() - _now = datetime.now() - self.cache_creation_date = _now - self.latest_cache_lookup = _now # selector base classes and operations ######################################## @@ -100,11 +85,15 @@ return AndSelector(self, other) def __rand__(self, other): return AndSelector(other, self) + def __iand__(self, other): + raise NotImplementedError('cant use inplace & (binary and)') def __or__(self, other): return OrSelector(self, other) def __ror__(self, other): return OrSelector(other, self) + def __ior__(self, other): + raise NotImplementedError('cant use inplace | (binary or)') def __invert__(self): return NotSelector(self) @@ -220,7 +209,7 @@ :__registry__: name of the registry for this object (string like 'views', 'templates'...) - :id: + :__regid__: object's identifier in the registry (string like 'main', 'primary', 'folder_box') :__select__: @@ -229,343 +218,200 @@ Moreover, the `__abstract__` attribute may be set to True to indicate that a appobject is abstract and should not be registered. 
- At registration time, the following attributes are set on the class: - :vreg: - the instance's registry - :schema: - the instance's schema - :config: - the instance's configuration + At selection time, the following attributes are set on the instance: + + :_cw: + current request + :cw_extra_kwargs: + other received arguments - At selection time, the following attributes are set on the instance: - :req: - current request - :rset: + only if rset is found in arguments (in which case rset/row/col will be + removed from cwextra_kwargs): + + :cw_rset: context result set or None - :row: + :cw_row: if a result set is set and the context is about a particular cell in the result set, and not the result set as a whole, specify the row number we are interested in, else None - :col: + :cw_col: if a result set is set and the context is about a particular cell in the result set, and not the result set as a whole, specify the col number we are interested in, else None """ __registry__ = None - id = None + __regid__ = None __select__ = yes() @classmethod - def classid(cls): - """returns a unique identifier for the appobject""" - return '%s.%s' % (cls.__module__, cls.__name__) - - # XXX bw compat code - @classmethod - def build___select__(cls): - for klass in cls.mro(): - if klass.__name__ == 'AppObject': - continue # the bw compat __selector__ is there - klassdict = klass.__dict__ - if ('__select__' in klassdict and '__selectors__' in klassdict - and '__selgenerated__' not in klassdict): - raise TypeError("__select__ and __selectors__ can't be used together on class %s" % cls) - if '__selectors__' in klassdict and '__selgenerated__' not in klassdict: - cls.__selgenerated__ = True - # case where __selectors__ is defined locally (but __select__ - # is in a parent class) - selectors = klassdict['__selectors__'] - if len(selectors) == 1: - # micro optimization: don't bother with AndSelector if there's - # only one selector - select = _instantiate_selector(selectors[0]) - else: - select = AndSelector(*selectors) - cls.__select__ = select - - @classmethod - def registered(cls, registry): + def __registered__(cls, registry): """called by the registry when the appobject has been registered. It must return the object that will be actually registered (this may be the right hook to create an instance for example). By default the appobject is returned without any transformation. """ - cls.build___select__() - cls.vreg = registry.vreg - cls.schema = registry.schema - cls.config = registry.config - cls.register_properties() + try: # XXX < 3.6 bw compat + pdefs = cls.property_defs + except AttributeError: + pdefs = getattr(cls, 'cw_property_defs', {}) + else: + warn('property_defs is deprecated, use cw_property_defs in %s' + % cls, DeprecationWarning) + for propid, pdef in pdefs.items(): + pdef = pdef.copy() # may be shared + pdef['default'] = getattr(cls, propid, pdef['default']) + pdef['sitewide'] = getattr(cls, 'site_wide', pdef.get('sitewide')) + registry.vreg.register_property(cls._cwpropkey(propid), **pdef) return cls - @classmethod - def vreg_initialization_completed(cls): - pass + def __init__(self, req, **extra): + super(AppObject, self).__init__() + self._cw = req + try: + self.cw_rset = extra.pop('rset') + self.cw_row = extra.pop('row', None) + self.cw_col = extra.pop('col', None) + except KeyError: + pass + self.cw_extra_kwargs = extra - # Eproperties definition: - # key: id of the property (the actual CWProperty key is build using - # .. 
- # value: tuple (property type, vocabfunc, default value, property description) - # possible types are those used by `logilab.common.configuration` + # persistent class properties ############################################## + # + # optional `cw_property_defs` dict on a class defines available persistent + # properties for this class: + # + # * key: id of the property (the actual CWProperty key is build using + # .. + # * value: tuple (property type, vocabfunc, default value, property description) + # possible types are those used by `logilab.common.configuration` # # notice that when it exists multiple objects with the same id (adaptation, # overriding) only the first encountered definition is considered, so those # objects can't try to have different default values for instance. - - property_defs = {} - - @classmethod - def register_properties(cls): - for propid, pdef in cls.property_defs.items(): - pdef = pdef.copy() # may be shared - pdef['default'] = getattr(cls, propid, pdef['default']) - pdef['sitewide'] = getattr(cls, 'site_wide', pdef.get('sitewide')) - cls.vreg.register_property(cls.propkey(propid), **pdef) + # + # you can then access to a property value using self.cw_propval, where self + # is an instance of class @classmethod - def propkey(cls, propid): - return '%s.%s.%s' % (cls.__registry__, cls.id, propid) - - @classproperty - @deprecated('use __select__ and & or | operators') - def __selectors__(cls): - selector = cls.__select__ - if isinstance(selector, AndSelector): - return tuple(selector.selectors) - if not isinstance(selector, tuple): - selector = (selector,) - return selector - - def __init__(self, req=None, rset=None, row=None, col=None, **extra): - super(AppObject, self).__init__() - self.req = req - self.rset = rset - self.row = row - self.col = col - self.extra_kwargs = extra - - def get_cache(self, cachename): - """ - NOTE: cachename should be dotted names as in : - - cubicweb.mycache - - cubes.blog.mycache - - etc. 
- """ - if cachename in CACHE_REGISTRY: - cache = CACHE_REGISTRY[cachename] - else: - cache = CACHE_REGISTRY[cachename] = Cache() - _now = datetime.now() - if _now > cache.latest_cache_lookup + ONESECOND: - ecache = self.req.execute('Any C,T WHERE C is CWCache, C name %(name)s, C timestamp T', - {'name':cachename}).get_entity(0,0) - cache.latest_cache_lookup = _now - if not ecache.valid(cache.cache_creation_date): - cache.clear() - cache.cache_creation_date = _now - return cache - - def propval(self, propid): - assert self.req - return self.req.property_value(self.propkey(propid)) - - def limited_rql(self): - """return a printable rql for the result set associated to the object, - with limit/offset correctly set according to maximum page size and - currently displayed page when necessary + def _cwpropkey(cls, propid): + """return cw property key for the property of the given id for this + class """ - # try to get page boundaries from the navigation component - # XXX we should probably not have a ref to this component here (eg in - # cubicweb.common) - nav = self.vreg['components'].select_object('navigation', self.req, - rset=self.rset) - if nav: - start, stop = nav.page_boundaries() - rql = self._limit_offset_rql(stop - start, start) - # result set may have be limited manually in which case navigation won't - # apply - elif self.rset.limited: - rql = self._limit_offset_rql(*self.rset.limited) - # navigation component doesn't apply and rset has not been limited, no - # need to limit query - else: - rql = self.rset.printable_rql() - return rql + return '%s.%s.%s' % (cls.__registry__, cls.__regid__, propid) - def _limit_offset_rql(self, limit, offset): - rqlst = self.rset.syntax_tree() - if len(rqlst.children) == 1: - select = rqlst.children[0] - olimit, ooffset = select.limit, select.offset - select.limit, select.offset = limit, offset - rql = rqlst.as_string(kwargs=self.rset.args) - # restore original limit/offset - select.limit, select.offset = olimit, ooffset - else: - newselect = Select() - newselect.limit = limit - newselect.offset = offset - aliases = [VariableRef(newselect.get_variable(vref.name, i)) - for i, vref in enumerate(rqlst.selection)] - newselect.set_with([SubQuery(aliases, rqlst)], check=False) - newunion = Union() - newunion.append(newselect) - rql = rqlst.as_string(kwargs=self.rset.args) - rqlst.parent = None - return rql + def cw_propval(self, propid): + """return cw property value associated to key - def view(self, __vid, rset=None, __fallback_oid=None, __registry='views', - **kwargs): - """shortcut to self.vreg.view method avoiding to pass self.req""" - return self.vreg[__registry].render(__vid, self.req, __fallback_oid, - rset=rset, **kwargs) - - def initialize_varmaker(self): - varmaker = self.req.get_page_data('rql_varmaker') - if varmaker is None: - varmaker = self.req.varmaker - self.req.set_page_data('rql_varmaker', varmaker) - self.varmaker = varmaker - - # url generation methods ################################################## - - controller = 'view' - - def build_url(self, *args, **kwargs): - """return an absolute URL using params dictionary key/values as URL - parameters. Values are automatically URL quoted, and the - publishing method to use may be specified or will be guessed. + .. 
""" - # use *args since we don't want first argument to be "anonymous" to - # avoid potential clash with kwargs - if args: - assert len(args) == 1, 'only 0 or 1 non-named-argument expected' - method = args[0] - else: - method = None - # XXX I (adim) think that if method is passed explicitly, we should - # not try to process it and directly call req.build_url() - if method is None: - method = self.controller - if method == 'view' and self.req.from_controller() == 'view' and \ - not '_restpath' in kwargs: - method = self.req.relative_path(includeparams=False) or 'view' - return self.req.build_url(method, **kwargs) + return self._cw.property_value(self._cwpropkey(propid)) + + # deprecated ############################################################### + + @property + @deprecated('[3.6] use self.__regid__') + def id(self): + return self.__regid__ - # various resources accessors ############################################# - - def entity(self, row, col=0): - """short cut to get an entity instance for a particular row/column - (col default to 0) - """ - return self.rset.get_entity(row, col) + @property + @deprecated('[3.6] use self._cw.vreg') + def vreg(self): + return self._cw.vreg - def complete_entity(self, row, col=0, skip_bytes=True): - """short cut to get an completed entity instance for a particular - row (all instance's attributes have been fetched) - """ - entity = self.entity(row, col) - entity.complete(skip_bytes=skip_bytes) - return entity + @property + @deprecated('[3.6] use self._cw.vreg.schema') + def schema(self): + return self._cw.vreg.schema + + @property + @deprecated('[3.6] use self._cw.vreg.config') + def config(self): + return self._cw.vreg.config - def user_rql_callback(self, args, msg=None): - """register a user callback to execute some rql query and return an url - to call it ready to be inserted in html - """ - def rqlexec(req, rql, args=None, key=None): - req.execute(rql, args, key) - return self.user_callback(rqlexec, args, msg) + @property + @deprecated('[3.6] use self._cw') + def req(self): + return self._cw + + @deprecated('[3.6] use self.cw_rset') + def get_rset(self): + return self.cw_rset + @deprecated('[3.6] use self.cw_rset') + def set_rset(self, rset): + self.cw_rset = rset + rset = property(get_rset, set_rset) - def user_callback(self, cb, args, msg=None, nonify=False): - """register the given user callback and return an url to call it ready to be - inserted in html - """ - from simplejson import dumps - self.req.add_js('cubicweb.ajax.js') - cbname = self.req.register_onetime_callback(cb, *args) - msg = dumps(msg or '') - return "javascript:userCallbackThenReloadPage('%s', %s)" % ( - cbname, msg) - - # formating methods ####################################################### + @property + @deprecated('[3.6] use self.cw_row') + def row(self): + return self.cw_row - def tal_render(self, template, variables): - """render a precompiled page template with variables in the given - dictionary as context - """ - from cubicweb.ext.tal import CubicWebContext - context = CubicWebContext() - context.update({'self': self, 'rset': self.rset, '_' : self.req._, - 'req': self.req, 'user': self.req.user}) - context.update(variables) - output = UStringIO() - template.expand(context, output) - return output.getvalue() + @property + @deprecated('[3.6] use self.cw_col') + def col(self): + return self.cw_col + + @property + @deprecated('[3.6] use self.cw_extra_kwargs') + def extra_kwargs(self): + return self.cw_extra_kwargs + @deprecated('[3.6] use self._cw.view') + def 
view(self, *args, **kwargs): + return self._cw.view(*args, **kwargs) + + @property + @deprecated('[3.6] use self._cw.varmaker') + def varmaker(self): + return self._cw.varmaker + + @deprecated('[3.6] use self._cw.get_cache') + def get_cache(self, cachename): + return self._cw.get_cache(cachename) + + @deprecated('[3.6] use self._cw.build_url') + def build_url(self, *args, **kwargs): + return self._cw.build_url(*args, **kwargs) + + @deprecated('[3.6] use self.cw_rset.limited_rql') + def limited_rql(self): + return self.cw_rset.limited_rql() + + @deprecated('[3.6] use self.cw_rset.complete_entity(row,col) instead') + def complete_entity(self, row, col=0, skip_bytes=True): + return self.cw_rset.complete_entity(row, col, skip_bytes) + + @deprecated('[3.6] use self.cw_rset.get_entity(row,col) instead') + def entity(self, row, col=0): + return self.cw_rset.get_entity(row, col) + + @deprecated('[3.6] use self._cw.user_rql_callback') + def user_rql_callback(self, args, msg=None): + return self._cw.user_rql_callback(args, msg) + + @deprecated('[3.6] use self._cw.user_callback') + def user_callback(self, cb, args, msg=None, nonify=False): + return self._cw.user_callback(cb, args, msg, nonify) + + @deprecated('[3.6] use self._cw.format_date') def format_date(self, date, date_format=None, time=False): - """return a string for a date time according to instance's - configuration - """ - if date: - if date_format is None: - if time: - date_format = self.req.property_value('ui.datetime-format') - else: - date_format = self.req.property_value('ui.date-format') - return ustrftime(date, date_format) - return u'' + return self._cw.format_date(date, date_format, time) + @deprecated('[3.6] use self._cw.format_timoe') def format_time(self, time): - """return a string for a time according to instance's - configuration - """ - if time: - return ustrftime(time, self.req.property_value('ui.time-format')) - return u'' + return self._cw.format_time(time) + @deprecated('[3.6] use self._cw.format_float') def format_float(self, num): - """return a string for floating point number according to instance's - configuration - """ - if num: - return self.req.property_value('ui.float-format') % num - return u'' + return self._cw.format_float(num) + @deprecated('[3.6] use self._cw.parse_datetime') def parse_datetime(self, value, etype='Datetime'): - """get a datetime or time from a string (according to etype) - Datetime formatted as Date are accepted - """ - assert etype in ('Datetime', 'Date', 'Time'), etype - # XXX raise proper validation error - if etype == 'Datetime': - format = self.req.property_value('ui.datetime-format') - try: - return todatetime(strptime(value, format)) - except ValueError: - pass - elif etype == 'Time': - format = self.req.property_value('ui.time-format') - try: - # (adim) I can't find a way to parse a Time with a custom format - date = strptime(value, format) # this returns a DateTime - return time(date.hour, date.minute, date.second) - except ValueError: - raise ValueError('can\'t parse %r (expected %s)' % (value, format)) - try: - format = self.req.property_value('ui.date-format') - dt = strptime(value, format) - if etype == 'Datetime': - return todatetime(dt) - return todate(dt) - except ValueError: - raise ValueError('can\'t parse %r (expected %s)' % (value, format)) + return self._cw.parse_datetime(value, etype) - # security related methods ################################################ - - def ensure_ro_rql(self, rql): - """raise an exception if the given rql is not a select query""" - first = 
rql.split(' ', 1)[0].lower() - if first in ('insert', 'set', 'delete'): - raise Unauthorized(self.req._('only select queries are authorized')) + @deprecated('[3.6] use self.cw_propval') + def propval(self, propid): + return self._cw.property_value(self._cwpropkey(propid)) set_log_methods(AppObject, getLogger('cubicweb.appobject')) diff -r 15d541321a8c -r 74c1597f8a82 common/__init__.py --- a/common/__init__.py Wed Jan 20 10:13:02 2010 +0100 +++ b/common/__init__.py Wed Jan 20 10:13:45 2010 +0100 @@ -7,47 +7,3 @@ :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ -from logilab.common.adbh import FunctionDescr - -from cubicweb._exceptions import * # bw compat - -from rql.utils import register_function, iter_funcnode_variables - -class COMMA_JOIN(FunctionDescr): - supported_backends = ('postgres', 'sqlite',) - rtype = 'String' - - @classmethod - def st_description(cls, funcnode, mainindex, tr): - return ', '.join(sorted(term.get_description(mainindex, tr) - for term in iter_funcnode_variables(funcnode))) - -register_function(COMMA_JOIN) # XXX do not expose? - - -class CONCAT_STRINGS(COMMA_JOIN): - aggregat = True - -register_function(CONCAT_STRINGS) # XXX bw compat - -class GROUP_CONCAT(CONCAT_STRINGS): - supported_backends = ('mysql', 'postgres', 'sqlite',) - -register_function(GROUP_CONCAT) - - -class LIMIT_SIZE(FunctionDescr): - supported_backends = ('postgres', 'sqlite',) - rtype = 'String' - - @classmethod - def st_description(cls, funcnode, mainindex, tr): - return funcnode.children[0].get_description(mainindex, tr) - -register_function(LIMIT_SIZE) - - -class TEXT_LIMIT_SIZE(LIMIT_SIZE): - supported_backends = ('mysql', 'postgres', 'sqlite',) - -register_function(TEXT_LIMIT_SIZE) diff -r 15d541321a8c -r 74c1597f8a82 common/appobject.py --- a/common/appobject.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -"""pre 3.2 bw compat - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -# pylint: disable-msg=W0614,W0401 -from warnings import warn -warn('moved to cubicweb.appobject', DeprecationWarning, stacklevel=2) -from cubicweb.appobject import * diff -r 15d541321a8c -r 74c1597f8a82 common/entity.py --- a/common/entity.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,12 +0,0 @@ -"""pre 3.2 bw compat - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -# pylint: disable-msg=W0614,W0401 -from warnings import warn -warn('moved to cubicweb.entity', DeprecationWarning, stacklevel=2) -from cubicweb.entity import * -from cubicweb.entity import _marker diff -r 15d541321a8c -r 74c1597f8a82 common/i18n.py --- a/common/i18n.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,99 +0,0 @@ -"""Some i18n/gettext utilities. - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -import re -import os -import sys -from os.path import join, basename, splitext, exists -from glob import glob - -from cubicweb.toolsutils import create_dir - -def extract_from_tal(files, output_file): - """extract i18n strings from tal and write them into the given output file - using standard python gettext marker (_) - """ - output = open(output_file, 'w') - for filepath in files: - for match in re.finditer('i18n:(content|replace)="([^"]+)"', open(filepath).read()): - print >> output, '_("%s")' % match.group(2) - output.close() - - -def add_msg(w, msgid, msgctx=None): - """write an empty pot msgid definition""" - if isinstance(msgid, unicode): - msgid = msgid.encode('utf-8') - if msgctx: - if isinstance(msgctx, unicode): - msgctx = msgctx.encode('utf-8') - w('msgctxt "%s"\n' % msgctx) - msgid = msgid.replace('"', r'\"').splitlines() - if len(msgid) > 1: - w('msgid ""\n') - for line in msgid: - w('"%s"' % line.replace('"', r'\"')) - else: - w('msgid "%s"\n' % msgid[0]) - w('msgstr ""\n\n') - - -def execute(cmd): - """display the command, execute it and raise an Exception if returned - status != 0 - """ - print cmd.replace(os.getcwd() + os.sep, '') - from subprocess import call - status = call(cmd, shell=True) - if status != 0: - raise Exception('status = %s' % status) - - -def available_catalogs(i18ndir=None): - if i18ndir is None: - wildcard = '*.po' - else: - wildcard = join(i18ndir, '*.po') - for popath in glob(wildcard): - lang = splitext(basename(popath))[0] - yield lang, popath - - -def compile_i18n_catalogs(sourcedirs, destdir, langs): - """generate .mo files for a set of languages into the `destdir` i18n directory - """ - from logilab.common.fileutils import ensure_fs_mode - print '-> compiling %s catalogs...' % destdir - errors = [] - for lang in langs: - langdir = join(destdir, lang, 'LC_MESSAGES') - if not exists(langdir): - create_dir(langdir) - pofiles = [join(path, '%s.po' % lang) for path in sourcedirs] - pofiles = [pof for pof in pofiles if exists(pof)] - mergedpo = join(destdir, '%s_merged.po' % lang) - try: - # merge instance/cubes messages catalogs with the stdlib's one - execute('msgcat --use-first --sort-output --strict -o "%s" %s' - % (mergedpo, ' '.join('"%s"' % f for f in pofiles))) - # make sure the .mo file is writeable and compiles with *msgfmt* - applmo = join(destdir, lang, 'LC_MESSAGES', 'cubicweb.mo') - try: - ensure_fs_mode(applmo) - except OSError: - pass # suppose not exists - execute('msgfmt "%s" -o "%s"' % (mergedpo, applmo)) - except Exception, ex: - errors.append('while handling language %s: %s' % (lang, ex)) - try: - # clean everything - os.unlink(mergedpo) - except Exception: - continue - return errors diff -r 15d541321a8c -r 74c1597f8a82 common/mail.py --- a/common/mail.py Wed Jan 20 10:13:02 2010 +0100 +++ b/common/mail.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,272 +1,5 @@ -"""Common utilies to format / semd emails. - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -from base64 import b64encode, b64decode -from itertools import repeat -from time import time -from email.MIMEMultipart import MIMEMultipart -from email.MIMEText import MIMEText -from email.MIMEImage import MIMEImage -from email.Header import Header -try: - from socket import gethostname -except ImportError: - def gethostname(): # gae - return 'XXX' - -from cubicweb.view import EntityView -from cubicweb.entity import Entity - -def header(ustring): - return Header(ustring.encode('UTF-8'), 'UTF-8') - -def addrheader(uaddr, uname=None): - # even if an email address should be ascii, encode it using utf8 since - # automatic tests may generate non ascii email address - addr = uaddr.encode('UTF-8') - if uname: - return '%s <%s>' % (header(uname).encode(), addr) - return addr - - -def construct_message_id(appid, eid, withtimestamp=True): - if withtimestamp: - addrpart = 'eid=%s×tamp=%.10f' % (eid, time()) - else: - addrpart = 'eid=%s' % eid - # we don't want any equal sign nor trailing newlines - leftpart = b64encode(addrpart, '.-').rstrip().rstrip('=') - return '<%s@%s.%s>' % (leftpart, appid, gethostname()) - - -def parse_message_id(msgid, appid): - if msgid[0] == '<': - msgid = msgid[1:] - if msgid[-1] == '>': - msgid = msgid[:-1] - try: - values, qualif = msgid.split('@') - padding = len(values) % 4 - values = b64decode(str(values + '='*padding), '.-') - values = dict(v.split('=') for v in values.split('&')) - fromappid, host = qualif.split('.', 1) - except: - return None - if appid != fromappid or host != gethostname(): - return None - return values - - -def format_mail(uinfo, to_addrs, content, subject="", - cc_addrs=(), msgid=None, references=(), config=None): - """Sends an Email to 'e_addr' with content 'content', and subject 'subject' - - to_addrs and cc_addrs are expected to be a list of email address without - name - """ - assert type(content) is unicode, repr(content) - msg = MIMEText(content.encode('UTF-8'), 'plain', 'UTF-8') - # safety: keep only the first newline - subject = subject.splitlines()[0] - msg['Subject'] = header(subject) - if uinfo.get('email'): - email = uinfo['email'] - elif config and config['sender-addr']: - email = unicode(config['sender-addr']) - else: - email = u'' - if uinfo.get('name'): - name = uinfo['name'] - elif config and config['sender-addr']: - name = unicode(config['sender-name']) - else: - name = u'' - msg['From'] = addrheader(email, name) - if config and config['sender-addr'] and config['sender-addr'] != email: - appaddr = addrheader(config['sender-addr'], config['sender-name']) - msg['Reply-to'] = '%s, %s' % (msg['From'], appaddr) - elif email: - msg['Reply-to'] = msg['From'] - if config is not None: - msg['X-CW'] = config.appid - unique_addrs = lambda addrs: sorted(set(addr for addr in addrs if addr is not None)) - msg['To'] = ', '.join(addrheader(addr) for addr in unique_addrs(to_addrs)) - if cc_addrs: - msg['Cc'] = ', '.join(addrheader(addr) for addr in unique_addrs(cc_addrs)) - if msgid: - msg['Message-id'] = msgid - if references: - msg['References'] = ', '.join(references) - return msg - - -class HtmlEmail(MIMEMultipart): - - def __init__(self, subject, textcontent, htmlcontent, - sendermail=None, sendername=None, recipients=None, ccrecipients=None): - MIMEMultipart.__init__(self, 'related') - self['Subject'] = header(subject) - self.preamble = 'This is a 
multi-part message in MIME format.' - # Attach alternative text message - alternative = MIMEMultipart('alternative') - self.attach(alternative) - msgtext = MIMEText(textcontent.encode('UTF-8'), 'plain', 'UTF-8') - alternative.attach(msgtext) - # Attach html message - msghtml = MIMEText(htmlcontent.encode('UTF-8'), 'html', 'UTF-8') - alternative.attach(msghtml) - if sendermail or sendername: - self['From'] = addrheader(sendermail, sendername) - if recipients: - self['To'] = ', '.join(addrheader(addr) for addr in recipients if addr is not None) - if ccrecipients: - self['Cc'] = ', '.join(addrheader(addr) for addr in ccrecipients if addr is not None) - - def attach_image(self, data, htmlId): - image = MIMEImage(data) - image.add_header('Content-ID', '<%s>' % htmlId) - self.attach(image) - - -class NotificationView(EntityView): - """abstract view implementing the "email" API (eg to simplify sending - notification) - """ - # XXX refactor this class to work with len(rset) > 1 - - msgid_timestamp = True - - # this is usually the method to call - def render_and_send(self, **kwargs): - """generate and send an email message for this view""" - delayed = kwargs.pop('delay_to_commit', None) - for recipients, msg in self.render_emails(**kwargs): - if delayed is None: - self.send(recipients, msg) - elif delayed: - self.send_on_commit(recipients, msg) - else: - self.send_now(recipients, msg) - - def cell_call(self, row, col=0, **kwargs): - self.w(self.req._(self.content) % self.context(**kwargs)) - - def render_emails(self, **kwargs): - """generate and send emails for this view (one per recipient)""" - self._kwargs = kwargs - recipients = self.recipients() - if not recipients: - self.info('skipping %s notification, no recipients', self.id) - return - if self.rset is not None: - entity = self.entity(self.row or 0, self.col or 0) - # if the view is using timestamp in message ids, no way to reference - # previous email - if not self.msgid_timestamp: - refs = [self.construct_message_id(eid) - for eid in entity.notification_references(self)] - else: - refs = () - msgid = self.construct_message_id(entity.eid) - else: - refs = () - msgid = None - req = self.req - self.user_data = req.user_data() - origlang = req.lang - for something in recipients: - if isinstance(something, Entity): - # hi-jack self.req to get a session for the returned user - self.req = self.req.hijack_user(something) - emailaddr = something.get_email() - else: - emailaddr, lang = something - self.req.set_language(lang) - # since the same view (eg self) may be called multiple time and we - # need a fresh stream at each iteration, reset it explicitly - self.w = None - # XXX call render before subject to set .row/.col attributes on the - # view - try: - content = self.render(row=0, col=0, **kwargs) - subject = self.subject() - except SkipEmail: - continue - except Exception, ex: - # shouldn't make the whole transaction fail because of rendering - # error (unauthorized or such) - self.exception(str(ex)) - continue - msg = format_mail(self.user_data, [emailaddr], content, subject, - config=self.config, msgid=msgid, references=refs) - yield [emailaddr], msg - # restore language - req.set_language(origlang) - - # recipients / email sending ############################################### - - def recipients(self): - """return a list of either 2-uple (email, language) or user entity to - who this email should be sent - """ - # use super_session when available, we don't want to consider security - # when selecting recipients_finder - try: - req = 
self.req.super_session - except AttributeError: - req = self.req - finder = self.vreg['components'].select('recipients_finder', req, - rset=self.rset, - row=self.row or 0, - col=self.col or 0) - return finder.recipients() - - def send_now(self, recipients, msg): - self.config.sendmails([(msg, recipients)]) - - def send_on_commit(self, recipients, msg): - raise NotImplementedError - - send = send_now - - # email generation helpers ################################################# - - def construct_message_id(self, eid): - return construct_message_id(self.config.appid, eid, self.msgid_timestamp) - - def format_field(self, attr, value): - return ':%(attr)s: %(value)s' % {'attr': attr, 'value': value} - - def format_section(self, attr, value): - return '%(attr)s\n%(ul)s\n%(value)s\n' % { - 'attr': attr, 'ul': '-'*len(attr), 'value': value} - - def subject(self): - entity = self.entity(self.row or 0, self.col or 0) - subject = self.req._(self.message) - etype = entity.dc_type() - eid = entity.eid - login = self.user_data['login'] - return self.req._('%(subject)s %(etype)s #%(eid)s (%(login)s)') % locals() - - def context(self, **kwargs): - entity = self.entity(self.row or 0, self.col or 0) - for key, val in kwargs.iteritems(): - if val and isinstance(val, unicode) and val.strip(): - kwargs[key] = self.req._(val) - kwargs.update({'user': self.user_data['login'], - 'eid': entity.eid, - 'etype': entity.dc_type(), - 'url': entity.absolute_url(), - 'title': entity.dc_long_title(),}) - return kwargs - - -class SkipEmail(Exception): - """raise this if you decide to skip an email during its generation""" +"""pre 3.6 bw compat""" +# pylint: disable-msg=W0614,W0401 +from warnings import warn +warn('moved to cubicweb.mail', DeprecationWarning, stacklevel=2) +from cubicweb.mail import * diff -r 15d541321a8c -r 74c1597f8a82 common/migration.py --- a/common/migration.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,373 +0,0 @@ -"""utilities for instances migration - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -import sys -import os -import logging -import tempfile -from os.path import exists, join, basename, splitext - -from logilab.common.decorators import cached -from logilab.common.configuration import REQUIRED, read_old_config -from logilab.common.shellutils import ASK - -from cubicweb import ConfigurationError - - -def filter_scripts(config, directory, fromversion, toversion, quiet=True): - """return a list of paths of migration files to consider to upgrade - from a version to a greater one - """ - from logilab.common.changelog import Version # doesn't work with appengine - assert fromversion - assert toversion - assert isinstance(fromversion, tuple), fromversion.__class__ - assert isinstance(toversion, tuple), toversion.__class__ - assert fromversion <= toversion, (fromversion, toversion) - if not exists(directory): - if not quiet: - print directory, "doesn't exists, no migration path" - return [] - if fromversion == toversion: - return [] - result = [] - for fname in os.listdir(directory): - if fname.endswith('.pyc') or fname.endswith('.pyo') \ - or fname.endswith('~'): - continue - fpath = join(directory, fname) - try: - tver, mode = fname.split('_', 1) - except ValueError: - continue - mode = mode.split('.', 1)[0] - if not config.accept_mode(mode): - continue - try: - tver = Version(tver) - except ValueError: - continue - if tver <= fromversion: - continue - if tver > toversion: - continue - result.append((tver, fpath)) - # be sure scripts are executed in order - return sorted(result) - - -IGNORED_EXTENSIONS = ('.swp', '~') - - -def execscript_confirm(scriptpath): - """asks for confirmation before executing a script and provides the - ability to show the script's content - """ - while True: - answer = ASK.ask('Execute %r ?' 
% scriptpath, ('Y','n','show'), 'Y') - if answer == 'n': - return False - elif answer == 'show': - stream = open(scriptpath) - scriptcontent = stream.read() - stream.close() - print - print scriptcontent - print - else: - return True - -def yes(*args, **kwargs): - return True - - -class MigrationHelper(object): - """class holding CubicWeb Migration Actions used by migration scripts""" - - def __init__(self, config, interactive=True, verbosity=1): - self.config = config - if config: - # no config on shell to a remote instance - self.config.init_log(logthreshold=logging.ERROR, debug=True) - # 0: no confirmation, 1: only main commands confirmed, 2 ask for everything - self.verbosity = verbosity - self.need_wrap = True - if not interactive or not verbosity: - self.confirm = yes - self.execscript_confirm = yes - else: - self.execscript_confirm = execscript_confirm - self._option_changes = [] - self.__context = {'confirm': self.confirm, - 'config': self.config, - 'interactive_mode': interactive, - } - - def __getattribute__(self, name): - try: - return object.__getattribute__(self, name) - except AttributeError: - cmd = 'cmd_%s' % name - if hasattr(self, cmd): - meth = getattr(self, cmd) - return lambda *args, **kwargs: self.interact(args, kwargs, - meth=meth) - raise - raise AttributeError(name) - - def repo_connect(self): - return self.config.repository() - - def migrate(self, vcconf, toupgrade, options): - """upgrade the given set of cubes - - `cubes` is an ordered list of 3-uple: - (cube, fromversion, toversion) - """ - if options.fs_only: - # monkey path configuration.accept_mode so database mode (e.g. Any) - # won't be accepted - orig_accept_mode = self.config.accept_mode - def accept_mode(mode): - if mode == 'Any': - return False - return orig_accept_mode(mode) - self.config.accept_mode = accept_mode - # may be an iterator - toupgrade = tuple(toupgrade) - vmap = dict( (cube, (fromver, tover)) for cube, fromver, tover in toupgrade) - ctx = self.__context - ctx['versions_map'] = vmap - if self.config.accept_mode('Any') and 'cubicweb' in vmap: - migrdir = self.config.migration_scripts_dir() - self.cmd_process_script(join(migrdir, 'bootstrapmigration_repository.py')) - for cube, fromversion, toversion in toupgrade: - if cube == 'cubicweb': - migrdir = self.config.migration_scripts_dir() - else: - migrdir = self.config.cube_migration_scripts_dir(cube) - scripts = filter_scripts(self.config, migrdir, fromversion, toversion) - if scripts: - prevversion = None - for version, script in scripts: - # take care to X.Y.Z_Any.py / X.Y.Z_common.py: we've to call - # cube_upgraded once all script of X.Y.Z have been executed - if prevversion is not None and version != prevversion: - self.cube_upgraded(cube, prevversion) - prevversion = version - self.cmd_process_script(script) - self.cube_upgraded(cube, toversion) - else: - self.cube_upgraded(cube, toversion) - - def cube_upgraded(self, cube, version): - pass - - def shutdown(self): - pass - - def interact(self, args, kwargs, meth): - """execute the given method according to user's confirmation""" - msg = 'Execute command: %s(%s) ?' 
% ( - meth.__name__[4:], - ', '.join([repr(arg) for arg in args] + - ['%s=%r' % (n,v) for n,v in kwargs.items()])) - if 'ask_confirm' in kwargs: - ask_confirm = kwargs.pop('ask_confirm') - else: - ask_confirm = True - if not ask_confirm or self.confirm(msg): - return meth(*args, **kwargs) - - def confirm(self, question, shell=True, abort=True, retry=False, default='y'): - """ask for confirmation and return true on positive answer - - if `retry` is true the r[etry] answer may return 2 - """ - possibleanswers = ['y','n'] - if abort: - possibleanswers.append('abort') - if shell: - possibleanswers.append('shell') - if retry: - possibleanswers.append('retry') - try: - answer = ASK.ask(question, possibleanswers, default) - except (EOFError, KeyboardInterrupt): - answer = 'abort' - if answer == 'n': - return False - if answer == 'retry': - return 2 - if answer == 'abort': - raise SystemExit(1) - if shell and answer == 'shell': - self.interactive_shell() - return self.confirm(question) - return True - - def interactive_shell(self): - self.confirm = yes - self.need_wrap = False - # avoid '_' to be added to builtins by sys.display_hook - def do_not_add___to_builtins(obj): - if obj is not None: - print repr(obj) - sys.displayhook = do_not_add___to_builtins - local_ctx = self._create_context() - try: - import readline - from rlcompleter import Completer - except ImportError: - # readline not available - pass - else: - readline.set_completer(Completer(local_ctx).complete) - readline.parse_and_bind('tab: complete') - home_key = 'HOME' - if sys.platform == 'win32': - home_key = 'USERPROFILE' - histfile = os.path.join(os.environ[home_key], ".eshellhist") - try: - readline.read_history_file(histfile) - except IOError: - pass - from code import interact - banner = """entering the migration python shell -just type migration commands or arbitrary python code and type ENTER to execute it -type "exit" or Ctrl-D to quit the shell and resume operation""" - # give custom readfunc to avoid http://bugs.python.org/issue1288615 - def unicode_raw_input(prompt): - return unicode(raw_input(prompt), sys.stdin.encoding) - interact(banner, readfunc=unicode_raw_input, local=local_ctx) - readline.write_history_file(histfile) - # delete instance's confirm attribute to avoid questions - del self.confirm - self.need_wrap = True - - @cached - def _create_context(self): - """return a dictionary to use as migration script execution context""" - context = self.__context - for attr in dir(self): - if attr.startswith('cmd_'): - if self.need_wrap: - context[attr[4:]] = getattr(self, attr[4:]) - else: - context[attr[4:]] = getattr(self, attr) - return context - - def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs): - """execute a migration script - in interactive mode, display the migration script path, ask for - confirmation and execute it if confirmed - """ - migrscript = os.path.normpath(migrscript) - if migrscript.endswith('.py'): - script_mode = 'python' - elif migrscript.endswith('.txt') or migrscript.endswith('.rst'): - script_mode = 'doctest' - else: - raise Exception('This is not a valid cubicweb shell input') - if not self.execscript_confirm(migrscript): - return - scriptlocals = self._create_context().copy() - if script_mode == 'python': - if funcname is None: - pyname = '__main__' - else: - pyname = splitext(basename(migrscript))[0] - scriptlocals.update({'__file__': migrscript, '__name__': pyname}) - execfile(migrscript, scriptlocals) - if funcname is not None: - try: - func = scriptlocals[funcname] - 
self.info('found %s in locals', funcname) - assert callable(func), '%s (%s) is not callable' % (func, funcname) - except KeyError: - self.critical('no %s in script %s', funcname, migrscript) - return None - return func(*args, **kwargs) - else: # script_mode == 'doctest' - import doctest - doctest.testfile(migrscript, module_relative=False, - optionflags=doctest.ELLIPSIS, globs=scriptlocals) - - def cmd_option_renamed(self, oldname, newname): - """a configuration option has been renamed""" - self._option_changes.append(('renamed', oldname, newname)) - - def cmd_option_group_change(self, option, oldgroup, newgroup): - """a configuration option has been moved in another group""" - self._option_changes.append(('moved', option, oldgroup, newgroup)) - - def cmd_option_added(self, optname): - """a configuration option has been added""" - self._option_changes.append(('added', optname)) - - def cmd_option_removed(self, optname): - """a configuration option has been removed""" - # can safely be ignored - #self._option_changes.append(('removed', optname)) - - def cmd_option_type_changed(self, optname, oldtype, newvalue): - """a configuration option's type has changed""" - self._option_changes.append(('typechanged', optname, oldtype, newvalue)) - - def cmd_add_cubes(self, cubes): - """modify the list of used cubes in the in-memory config - returns newly inserted cubes, including dependencies - """ - if isinstance(cubes, basestring): - cubes = (cubes,) - origcubes = self.config.cubes() - newcubes = [p for p in self.config.expand_cubes(cubes) - if not p in origcubes] - if newcubes: - for cube in cubes: - assert cube in newcubes - self.config.add_cubes(newcubes) - return newcubes - - def cmd_remove_cube(self, cube, removedeps=False): - if removedeps: - toremove = self.config.expand_cubes([cube]) - else: - toremove = (cube,) - origcubes = self.config._cubes - basecubes = [c for c in origcubes if not c in toremove] - self.config._cubes = tuple(self.config.expand_cubes(basecubes)) - removed = [p for p in origcubes if not p in self.config._cubes] - if not cube in removed: - raise ConfigurationError("can't remove cube %s, " - "used as a dependency" % cube) - return removed - - def rewrite_configuration(self): - # import locally, show_diffs unavailable in gae environment - from cubicweb.toolsutils import show_diffs - configfile = self.config.main_config_file() - if self._option_changes: - read_old_config(self.config, self._option_changes, configfile) - fd, newconfig = tempfile.mkstemp() - for optdescr in self._option_changes: - if optdescr[0] == 'added': - optdict = self.config.get_option_def(optdescr[1]) - if optdict.get('default') is REQUIRED: - self.config.input_option(optdescr[1], optdict) - self.config.generate_config(open(newconfig, 'w')) - show_diffs(configfile, newconfig) - os.close(fd) - if exists(newconfig): - os.unlink(newconfig) - - -from logging import getLogger -from cubicweb import set_log_methods -set_log_methods(MigrationHelper, getLogger('cubicweb.migration')) diff -r 15d541321a8c -r 74c1597f8a82 common/mixins.py --- a/common/mixins.py Wed Jan 20 10:13:02 2010 +0100 +++ b/common/mixins.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,315 +1,5 @@ -"""mixins of entity/views organized somewhat in a graph or tree structure - - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -from logilab.common.deprecation import deprecated -from logilab.common.decorators import cached - -from cubicweb import typed_eid -from cubicweb.selectors import implements -from cubicweb.interfaces import IEmailable, ITree - - -class TreeMixIn(object): - """base tree-mixin providing the tree interface - - This mixin has to be inherited explicitly and configured using the - tree_attribute, parent_target and children_target class attribute to - benefit from this default implementation - """ - tree_attribute = None - # XXX misnamed - parent_target = 'subject' - children_target = 'object' - - def different_type_children(self, entities=True): - """return children entities of different type as this entity. - - according to the `entities` parameter, return entity objects or the - equivalent result set - """ - res = self.related(self.tree_attribute, self.children_target, - entities=entities) - if entities: - return [e for e in res if e.e_schema != self.e_schema] - return res.filtered_rset(lambda x: x.e_schema != self.e_schema, self.col) - - def same_type_children(self, entities=True): - """return children entities of the same type as this entity. - - according to the `entities` parameter, return entity objects or the - equivalent result set - """ - res = self.related(self.tree_attribute, self.children_target, - entities=entities) - if entities: - return [e for e in res if e.e_schema == self.e_schema] - return res.filtered_rset(lambda x: x.e_schema == self.e_schema, self.col) - - def iterchildren(self, _done=None): - if _done is None: - _done = set() - for child in self.children(): - if child.eid in _done: - self.error('loop in %s tree', self.id.lower()) - continue - yield child - _done.add(child.eid) - - def prefixiter(self, _done=None): - if _done is None: - _done = set() - if self.eid in _done: - return - yield self - _done.add(self.eid) - for child in self.iterchildren(_done): - try: - for entity in child.prefixiter(_done): - yield entity - except AttributeError: - pass - - @cached - def path(self): - """returns the list of eids from the root object to this object""" - path = [] - parent = self - while parent: - if parent.eid in path: - self.error('loop in %s tree', self.id.lower()) - break - path.append(parent.eid) - try: - # check we are not leaving the tree - if (parent.tree_attribute != self.tree_attribute or - parent.parent_target != self.parent_target): - break - parent = parent.parent() - except AttributeError: - break - - path.reverse() - return path - - def iterparents(self): - def _uptoroot(self): - curr = self - while True: - curr = curr.parent() - if curr is None: - break - yield curr - return _uptoroot(self) - - def notification_references(self, view): - """used to control References field of email send on notification - for this entity. `view` is the notification view. - - Should return a list of eids which can be used to generate message ids - of previously sent email - """ - return self.path()[:-1] - - - ## ITree interface ######################################################## - def parent(self): - """return the parent entity if any, else None (e.g. 
if we are on the - root - """ - try: - return self.related(self.tree_attribute, self.parent_target, - entities=True)[0] - except (KeyError, IndexError): - return None - - def children(self, entities=True, sametype=False): - """return children entities - - according to the `entities` parameter, return entity objects or the - equivalent result set - """ - if sametype: - return self.same_type_children(entities) - else: - return self.related(self.tree_attribute, self.children_target, - entities=entities) - - def children_rql(self): - return self.related_rql(self.tree_attribute, self.children_target) - - def is_leaf(self): - return len(self.children()) == 0 - - def is_root(self): - return self.parent() is None - - def root(self): - """return the root object""" - return self.req.entity_from_eid(self.path()[0]) - - -class EmailableMixIn(object): - """base mixin providing the default get_email() method used by - the massmailing view - - NOTE: The default implementation is based on the - primary_email / use_email scheme - """ - __implements__ = (IEmailable,) - - def get_email(self): - if getattr(self, 'primary_email', None): - return self.primary_email[0].address - if getattr(self, 'use_email', None): - return self.use_email[0].address - return None - - @classmethod - def allowed_massmail_keys(cls): - """returns a set of allowed email substitution keys - - The default is to return the entity's attribute list but an - entity class might override this method to allow extra keys. - For instance, the Person class might want to return a `companyname` - key. - """ - return set(rschema.type - for rschema, attrtype in cls.e_schema.attribute_definitions() - if attrtype.type not in ('Password', 'Bytes')) - - def as_email_context(self): - """returns the dictionary as used by the sendmail controller to - build email bodies. - - NOTE: the dictionary keys should match the list returned by the - `allowed_massmail_keys` method. - """ - return dict( (attr, getattr(self, attr)) for attr in self.allowed_massmail_keys() ) - - -"""pluggable mixins system: plug classes registered in MI_REL_TRIGGERS on entity -classes which have the relation described by the dict's key. - -NOTE: pluggable mixins can't override any method of the 'explicit' user classes tree -(eg without plugged classes). This includes bases Entity and AnyEntity classes. -""" -MI_REL_TRIGGERS = { - ('primary_email', 'subject'): EmailableMixIn, - ('use_email', 'subject'): EmailableMixIn, - } - - - -def _done_init(done, view, row, col): - """handle an infinite recursion safety belt""" - if done is None: - done = set() - entity = view.entity(row, col) - if entity.eid in done: - msg = entity.req._('loop in %(rel)s relation (%(eid)s)') % { - 'rel': entity.tree_attribute, - 'eid': entity.eid - } - return None, msg - done.add(entity.eid) - return done, entity - - -class TreeViewMixIn(object): - """a recursive tree view""" - id = 'tree' - item_vid = 'treeitem' - __select__ = implements(ITree) - - def call(self, done=None, **kwargs): - if done is None: - done = set() - super(TreeViewMixIn, self).call(done=done, **kwargs) - - def cell_call(self, row, col=0, vid=None, done=None, **kwargs): - done, entity = _done_init(done, self, row, col) - if done is None: - # entity is actually an error message - self.w(u'
<li class="badcontent">%s</li>' % entity) - return - self.open_item(entity) - entity.view(vid or self.item_vid, w=self.w, **kwargs) - relatedrset = entity.children(entities=False) - self.wview(self.id, relatedrset, 'null', done=done, **kwargs) - self.close_item(entity) - - def open_item(self, entity): - self.w(u'<li class="%s">\n' % entity.id.lower()) - def close_item(self, entity): - self.w(u'</li>\n') - - -class TreePathMixIn(object): - """a recursive path view""" - id = 'path' - item_vid = 'oneline' - separator = u' > ' - - def call(self, **kwargs): - self.w(u'<div class="pathbar">') - super(TreePathMixIn, self).call(**kwargs) - self.w(u'</div>
    ') - - def cell_call(self, row, col=0, vid=None, done=None, **kwargs): - done, entity = _done_init(done, self, row, col) - if done is None: - # entity is actually an error message - self.w(u'%s' % entity) - return - parent = entity.parent() - if parent: - parent.view(self.id, w=self.w, done=done) - self.w(self.separator) - entity.view(vid or self.item_vid, w=self.w) - - -class ProgressMixIn(object): - """provide default implementations for IProgress interface methods""" - # This is an adapter isn't it ? - - @property - def cost(self): - return self.progress_info()['estimated'] - - @property - def revised_cost(self): - return self.progress_info().get('estimatedcorrected', self.cost) - - @property - def done(self): - return self.progress_info()['done'] - - @property - def todo(self): - return self.progress_info()['todo'] - - @cached - def progress_info(self): - raise NotImplementedError() - - def finished(self): - return not self.in_progress() - - def in_progress(self): - raise NotImplementedError() - - def progress(self): - try: - return 100. * self.done / self.revised_cost - except ZeroDivisionError: - # total cost is 0 : if everything was estimated, task is completed - if self.progress_info().get('notestimated'): - return 0. - return 100 +"""pre 3.6 bw compat""" +# pylint: disable-msg=W0614,W0401 +from warnings import warn +warn('moved to cubicweb.mixins', DeprecationWarning, stacklevel=2) +from cubicweb.mixins import * diff -r 15d541321a8c -r 74c1597f8a82 common/mttransforms.py --- a/common/mttransforms.py Wed Jan 20 10:13:02 2010 +0100 +++ b/common/mttransforms.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,93 +1,5 @@ -"""mime type transformation engine for cubicweb, based on mtconverter - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -from logilab import mtconverter - -from logilab.mtconverter.engine import TransformEngine -from logilab.mtconverter.transform import Transform -from logilab.mtconverter import (register_base_transforms, - register_pil_transforms, - register_pygments_transforms) - -from cubicweb.common.uilib import rest_publish, html_publish - -HTML_MIMETYPES = ('text/html', 'text/xhtml', 'application/xhtml+xml') - -# CubicWeb specific transformations - -class rest_to_html(Transform): - inputs = ('text/rest', 'text/x-rst') - output = 'text/html' - def _convert(self, trdata): - return rest_publish(trdata.appobject, trdata.decode()) - -class html_to_html(Transform): - inputs = HTML_MIMETYPES - output = 'text/html' - def _convert(self, trdata): - return html_publish(trdata.appobject, trdata.data) - - -# Instantiate and configure the transformation engine - -mtconverter.UNICODE_POLICY = 'replace' - -ENGINE = TransformEngine() -ENGINE.add_transform(rest_to_html()) -ENGINE.add_transform(html_to_html()) - -try: - from cubicweb.ext.tal import compile_template -except ImportError: - HAS_TAL = False - from cubicweb import schema - schema.NEED_PERM_FORMATS.remove('text/cubicweb-page-template') - -else: - HAS_TAL = True - - class ept_to_html(Transform): - inputs = ('text/cubicweb-page-template',) - output = 'text/html' - output_encoding = 'utf-8' - def _convert(self, trdata): - value = trdata.encode(self.output_encoding) - return trdata.appobject.tal_render(compile_template(value), {}) - - ENGINE.add_transform(ept_to_html()) - -if register_pil_transforms(ENGINE, verb=False): - HAS_PIL_TRANSFORMS = True -else: - HAS_PIL_TRANSFORMS = False - -try: - from logilab.mtconverter.transforms import pygmentstransforms - for mt in ('text/plain',) + HTML_MIMETYPES: - try: - pygmentstransforms.mimetypes.remove(mt) - except ValueError: - continue - register_pygments_transforms(ENGINE, verb=False) - - def patch_convert(cls): - def _convert(self, trdata, origconvert=cls._convert): - try: - trdata.appobject.req.add_css('pygments.css') - except AttributeError: # session has no add_css, only http request - pass - return origconvert(self, trdata) - cls._convert = _convert - patch_convert(pygmentstransforms.PygmentsHTMLTransform) - - HAS_PYGMENTS_TRANSFORMS = True -except ImportError: - HAS_PYGMENTS_TRANSFORMS = False - -register_base_transforms(ENGINE, verb=False) +"""pre 3.6 bw compat""" +# pylint: disable-msg=W0614,W0401 +from warnings import warn +warn('moved to cubicweb.mttransforms', DeprecationWarning, stacklevel=2) +from cubicweb.mttransforms import * diff -r 15d541321a8c -r 74c1597f8a82 common/schema.py --- a/common/schema.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -"""pre 3.0 bw compat - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -# pylint: disable-msg=W0614,W0401 -from warnings import warn -warn('moved to cubicweb.schema', DeprecationWarning, stacklevel=2) -from cubicweb.schema import * diff -r 15d541321a8c -r 74c1597f8a82 common/selectors.py --- a/common/selectors.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,12 +0,0 @@ -"""pre 3.2 bw compat - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -# pylint: disable-msg=W0614,W0401 -from warnings import warn -warn('moved to cubicweb.selectors', DeprecationWarning, stacklevel=2) -from cubicweb.selectors import * -from cubicweb.selectors import _rql_condition diff -r 15d541321a8c -r 74c1597f8a82 common/tags.py --- a/common/tags.py Wed Jan 20 10:13:02 2010 +0100 +++ b/common/tags.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,49 +1,5 @@ -"""helper classes to generate simple (X)HTML tags - -:organization: Logilab -:copyright: 2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -from cubicweb.common.uilib import simple_sgml_tag, sgml_attributes - -class tag(object): - def __init__(self, name, escapecontent=True): - self.name = name - self.escapecontent = escapecontent - - def __call__(self, __content=None, **attrs): - attrs.setdefault('escapecontent', self.escapecontent) - return simple_sgml_tag(self.name, __content, **attrs) - -button = tag('button') -input = tag('input') -textarea = tag('textarea') -a = tag('a') -span = tag('span') -div = tag('div', False) -img = tag('img') -label = tag('label') -option = tag('option') -h1 = tag('h1') -h2 = tag('h2') -h3 = tag('h3') -h4 = tag('h4') -h5 = tag('h5') -tr = tag('tr') -th = tag('th') -td = tag('td') - -def select(name, id=None, multiple=False, options=[], **attrs): - if multiple: - attrs['multiple'] = 'multiple' - if id: - attrs['id'] = id - attrs['name'] = name - html = [u'') - return u'\n'.join(html) - +"""pre 3.6 bw compat""" +# pylint: disable-msg=W0614,W0401 +from warnings import warn +warn('moved to cubicweb.tags', DeprecationWarning, stacklevel=2) +from cubicweb.tags import * diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/bootstrap_cubes --- a/common/test/data/bootstrap_cubes Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ - diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/migration/0.0.3_Any.py --- a/common/test/data/migration/0.0.3_Any.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -""" - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -coucou diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/migration/0.0.4_Any.py --- a/common/test/data/migration/0.0.4_Any.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -""" - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -coucou diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/migration/0.1.0_Any.py --- a/common/test/data/migration/0.1.0_Any.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -""" - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -coucou diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/migration/0.1.0_common.py --- a/common/test/data/migration/0.1.0_common.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -"""common to all configuration - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/migration/0.1.0_repository.py --- a/common/test/data/migration/0.1.0_repository.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -"""repository specific - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/migration/0.1.0_web.py --- a/common/test/data/migration/0.1.0_web.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -"""web only - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/migration/0.1.2_Any.py --- a/common/test/data/migration/0.1.2_Any.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -""" - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -coucou diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/migration/depends.map --- a/common/test/data/migration/depends.map Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ -0.0.2: 2.3.0 -0.0.3: 2.4.0 -# missing 0.0.4 entry, that's alright -0.1.0: 2.6.0 -0.1.2: 2.10.0 diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/server_migration/2.10.2_Any.sql diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/server_migration/2.5.0_Any.sql diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/server_migration/2.6.0_Any.sql diff -r 15d541321a8c -r 74c1597f8a82 common/test/data/server_migration/bootstrapmigration_repository.py --- a/common/test/data/server_migration/bootstrapmigration_repository.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -"""allways executed before all others in server migration - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. 
(Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" diff -r 15d541321a8c -r 74c1597f8a82 common/test/unittest_mail.py --- a/common/test/unittest_mail.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -"""unit tests for module cubicweb.common.mail - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" - -import os -import sys - -from logilab.common.testlib import unittest_main -from logilab.common.umessage import message_from_string - -from cubicweb.devtools.apptest import EnvBasedTC -from cubicweb.common.mail import format_mail - - -def getlogin(): - """avoid usinng os.getlogin() because of strange tty / stdin problems - (man 3 getlogin) - Another solution would be to use $LOGNAME, $USER or $USERNAME - """ - if sys.platform != 'win32': - import pwd - return pwd.getpwuid(os.getuid())[0] - else: - return os.environ.get('USERNAME') - - -class EmailTC(EnvBasedTC): - - def test_format_mail(self): - self.set_option('sender-addr', 'bim@boum.fr') - self.set_option('sender-name', 'BimBam') - - mail = format_mail({'name': 'oim', 'email': 'oim@logilab.fr'}, - ['test@logilab.fr'], u'un petit cöucou', u'bïjour', - config=self.config) - self.assertLinesEquals(mail.as_string(), """\ -MIME-Version: 1.0 -Content-Type: text/plain; charset="utf-8" -Content-Transfer-Encoding: base64 -Subject: =?utf-8?q?b=C3=AFjour?= -From: =?utf-8?q?oim?= -Reply-to: =?utf-8?q?oim?= , =?utf-8?q?BimBam?= -X-CW: data -To: test@logilab.fr - -dW4gcGV0aXQgY8O2dWNvdQ== -""") - msg = message_from_string(mail.as_string()) - self.assertEquals(msg.get('subject'), u'bïjour') - self.assertEquals(msg.get('from'), u'oim ') - self.assertEquals(msg.get('to'), u'test@logilab.fr') - self.assertEquals(msg.get('reply-to'), u'oim , BimBam ') - self.assertEquals(msg.get_payload(decode=True), u'un petit cöucou') - - - def test_format_mail_euro(self): - mail = format_mail({'name': u'oîm', 'email': u'oim@logilab.fr'}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €') - self.assertLinesEquals(mail.as_string(), """\ -MIME-Version: 1.0 -Content-Type: text/plain; charset="utf-8" -Content-Transfer-Encoding: base64 -Subject: =?utf-8?b?YsOvam91ciDigqw=?= -From: =?utf-8?q?o=C3=AEm?= -Reply-to: =?utf-8?q?o=C3=AEm?= -To: test@logilab.fr - -dW4gcGV0aXQgY8O2dWNvdSDigqw= -""") - msg = message_from_string(mail.as_string()) - self.assertEquals(msg.get('subject'), u'bïjour €') - self.assertEquals(msg.get('from'), u'oîm ') - self.assertEquals(msg.get('to'), u'test@logilab.fr') - self.assertEquals(msg.get('reply-to'), u'oîm ') - self.assertEquals(msg.get_payload(decode=True), u'un petit cöucou €') - - - def test_format_mail_from_reply_to(self): - # no sender-name, sender-addr in the configuration - self.set_option('sender-name', '') - self.set_option('sender-addr', '') - msg = format_mail({'name': u'', 'email': u''}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) - self.assertEquals(msg.get('from'), u'') - self.assertEquals(msg.get('reply-to'), None) - msg = format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) - msg = 
message_from_string(msg.as_string()) - self.assertEquals(msg.get('from'), u'tutu ') - self.assertEquals(msg.get('reply-to'), u'tutu ') - msg = format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €') - msg = message_from_string(msg.as_string()) - self.assertEquals(msg.get('from'), u'tutu ') - self.assertEquals(msg.get('reply-to'), u'tutu ') - # set sender name and address as expected - self.set_option('sender-name', 'cubicweb-test') - self.set_option('sender-addr', 'cubicweb-test@logilab.fr') - # anonymous notification: no name and no email specified - msg = format_mail({'name': u'', 'email': u''}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) - msg = message_from_string(msg.as_string()) - self.assertEquals(msg.get('from'), u'cubicweb-test ') - self.assertEquals(msg.get('reply-to'), u'cubicweb-test ') - # anonymous notification: only email specified - msg = format_mail({'email': u'tutu@logilab.fr'}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) - msg = message_from_string(msg.as_string()) - self.assertEquals(msg.get('from'), u'cubicweb-test ') - self.assertEquals(msg.get('reply-to'), u'cubicweb-test , cubicweb-test ') - # anonymous notification: only name specified - msg = format_mail({'name': u'tutu'}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) - msg = message_from_string(msg.as_string()) - self.assertEquals(msg.get('from'), u'tutu ') - self.assertEquals(msg.get('reply-to'), u'tutu ') - - - -if __name__ == '__main__': - unittest_main() - diff -r 15d541321a8c -r 74c1597f8a82 common/test/unittest_migration.py --- a/common/test/unittest_migration.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,106 +0,0 @@ -"""cubicweb.common.migration unit tests - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" - -from os.path import abspath -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.devtools import TestServerConfiguration -from cubicweb.devtools.apptest import TestEnvironment - -from cubicweb.cwconfig import CubicWebConfiguration -from cubicweb.common.migration import MigrationHelper, filter_scripts -from cubicweb.server.migractions import ServerMigrationHelper - - -class Schema(dict): - def has_entity(self, e_type): - return self.has_key(e_type) - -SMIGRDIR = abspath('data/server_migration') + '/' -TMIGRDIR = abspath('data/migration') + '/' - -class MigrTestConfig(TestServerConfiguration): - verbosity = 0 - def migration_scripts_dir(cls): - return SMIGRDIR - - def cube_migration_scripts_dir(cls, cube): - return TMIGRDIR - -class MigrationToolsTC(TestCase): - def setUp(self): - self.config = MigrTestConfig('data') - from yams.schema import Schema - self.config.load_schema = lambda expand_cubes=False: Schema('test') - self.config.__class__.cubicweb_appobject_path = frozenset() - self.config.__class__.cube_appobject_path = frozenset() - - def test_filter_scripts_base(self): - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,3,0), (2,4,0)), - []) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,5,0)), - [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql')]) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,6,0)), - [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,6,0)), - [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql'), - ((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,5,1)), - []) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,10,2)), - [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql'), - ((2, 10, 2), SMIGRDIR+'2.10.2_Any.sql')]) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,1), (2,6,0)), - [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) - - self.assertListEquals(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,3)), - [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py')]) - self.assertListEquals(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,4)), - [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py'), - ((0, 0, 4), TMIGRDIR+'0.0.4_Any.py')]) - - def test_filter_scripts_for_mode(self): - self.assertIsInstance(self.config.migration_handler(), ServerMigrationHelper) - config = CubicWebConfiguration('data') - config.verbosity = 0 - self.assert_(not isinstance(config.migration_handler(), ServerMigrationHelper)) - self.assertIsInstance(config.migration_handler(), MigrationHelper) - config = self.config - config.__class__.name = 'twisted' - self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), - [((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), - ((0, 1 ,0), TMIGRDIR+'0.1.0_web.py')]) - config.__class__.name = 'repository' - self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), - [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'), - ((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), - ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py')]) - config.__class__.name = 'all-in-one' - self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), - [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'), - ((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), - ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py'), - ((0, 1 ,0), TMIGRDIR+'0.1.0_web.py')]) - config.__class__.name = 
'repository' - - -from cubicweb.devtools import ApptestConfiguration, init_test_database, cleanup_sqlite - -class BaseCreationTC(TestCase): - - def test_db_creation(self): - """make sure database can be created""" - config = ApptestConfiguration('data') - source = config.sources()['system'] - self.assertEquals(source['db-driver'], 'sqlite') - cleanup_sqlite(source['db-name'], removecube=True) - init_test_database(driver=source['db-driver'], config=config) - - -if __name__ == '__main__': - unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 common/test/unittest_uilib.py --- a/common/test/unittest_uilib.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -"""unittests for cubicweb.common.uilib - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" - -__docformat__ = "restructuredtext en" - -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.tree import Node - -from cubicweb.common import uilib - -class UILIBTC(TestCase): - - def test_remove_tags(self): - """make sure remove_tags remove all tags""" - data = [ - ('

<h1>Hello</h1>', 'Hello'), - ('<h1>Hello <a href="foo/bar"><b>spam</b></a></h1>', 'Hello spam'), - ('<br>Hello<img src="doh.png"/>', 'Hello'), - ('<p></p>', ''), - ] - for text, expected in data: - got = uilib.remove_html_tags(text) - self.assertEquals(got, expected) - - def test_fallback_safe_cut(self): - self.assertEquals(uilib.fallback_safe_cut(u'ab cd', 4), u'ab c...') - self.assertEquals(uilib.fallback_safe_cut(u'ab cd', 5), u'ab cd') - self.assertEquals(uilib.fallback_safe_cut(u'ab &d', 4), u'ab &...') - self.assertEquals(uilib.fallback_safe_cut(u'ab &d ef', 5), u'ab &d...') - self.assertEquals(uilib.fallback_safe_cut(u'ab ìd', 4), u'ab ì...') - self.assertEquals(uilib.fallback_safe_cut(u'& &d ef', 4), u'& &d...') - - def test_lxml_safe_cut(self): - self.assertEquals(uilib.safe_cut(u'aaa<div>aaad</div> ef', 4), u'<div>aaa</div>a...') - self.assertEquals(uilib.safe_cut(u'aaa<div>aaad</div> ef', 7), u'<div>aaa</div><div>aaad</div>...') - self.assertEquals(uilib.safe_cut(u'aaa<div>aaad</div>', 7), u'<div>aaa</div><div>aaad</div>') - # Missing ellipsis due to space management but we don't care - self.assertEquals(uilib.safe_cut(u'ab &d', 4), u'<div>ab &...</div>
    ') - - def test_cut(self): - """tests uilib.cut() behaviour""" - data = [ - ('hello', 'hello'), - ('hello world', 'hello wo...'), - ("hellO' world", "hellO..."), - ] - for text, expected in data: - got = uilib.cut(text, 8) - self.assertEquals(got, expected) - - def test_text_cut(self): - """tests uilib.text_cut() behaviour with no text""" - data = [('',''), - ("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod -tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, -quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo -consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse -cillum dolore eu fugiat nulla pariatur.""", - "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod \ -tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, \ -quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo \ -consequat."), - ("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod -tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam, -quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo -consequat Duis aute irure dolor in reprehenderit in voluptate velit esse -cillum dolore eu fugiat nulla pariatur Excepteur sint occaecat cupidatat non -proident, sunt in culpa qui officia deserunt mollit anim id est laborum -""", - "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod \ -tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam, \ -quis nostrud exercitation ullamco laboris nisi"), - ] - for text, expected in data: - got = uilib.text_cut(text, 30) - self.assertEquals(got, expected) - -if __name__ == '__main__': - unittest_main() - diff -r 15d541321a8c -r 74c1597f8a82 common/uilib.py --- a/common/uilib.py Wed Jan 20 10:13:02 2010 +0100 +++ b/common/uilib.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,383 +1,5 @@ -# -*- coding: utf-8 -*- -"""user interface libraries - -contains some functions designed to help implementation of cubicweb user interface - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -import csv -import re -from StringIO import StringIO - -from logilab.mtconverter import xml_escape, html_unescape - -from cubicweb.utils import ustrftime - -def rql_for_eid(eid): - """return the rql query necessary to fetch entity with the given eid. This - function should only be used to generate link with rql inside, not to give - to cursor.execute (in which case you won't benefit from rql cache). - - :Parameters: - - `eid`: the eid of the entity we should search - :rtype: str - :return: the rql query - """ - return 'Any X WHERE X eid %s' % eid - - -def printable_value(req, attrtype, value, props=None, displaytime=True): - """return a displayable value (i.e. 
unicode string)""" - if value is None or attrtype == 'Bytes': - return u'' - if attrtype == 'String': - # don't translate empty value if you don't want strange results - if props is not None and value and props.get('internationalizable'): - return req._(value) - return value - if attrtype == 'Date': - return ustrftime(value, req.property_value('ui.date-format')) - if attrtype == 'Time': - return ustrftime(value, req.property_value('ui.time-format')) - if attrtype == 'Datetime': - if displaytime: - return ustrftime(value, req.property_value('ui.datetime-format')) - return ustrftime(value, req.property_value('ui.date-format')) - if attrtype == 'Boolean': - if value: - return req._('yes') - return req._('no') - if attrtype == 'Float': - value = req.property_value('ui.float-format') % value - return unicode(value) - - -# text publishing ############################################################# - -try: - from cubicweb.ext.rest import rest_publish # pylint: disable-msg=W0611 -except ImportError: - def rest_publish(entity, data): - """default behaviour if docutils was not found""" - return xml_escape(data) - -TAG_PROG = re.compile(r'', re.U) -def remove_html_tags(text): - """Removes HTML tags from text - - >>> remove_html_tags('hi world') - 'hi world' - >>> - """ - return TAG_PROG.sub('', text) - - -REF_PROG = re.compile(r"([^<]*)", re.U) -def _subst_rql(view, obj): - delim, rql, descr = obj.groups() - return u'%s' % (view.build_url(rql=rql), descr) - -def html_publish(view, text): - """replace links by """ - if not text: - return u'' - return REF_PROG.sub(lambda obj, view=view:_subst_rql(view, obj), text) - -# fallback implementation, nicer one defined below if lxml is available -def soup2xhtml(data, encoding): - # normalize line break - # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1 - return u'\n'.join(data.splitlines()) - -# fallback implementation, nicer one defined below if lxml> 2.0 is available -def safe_cut(text, length): - """returns a string of length based on , removing any html - tags from given text if cut is necessary.""" - if text is None: - return u'' - noenttext = html_unescape(text) - text_nohtml = remove_html_tags(noenttext) - # try to keep html tags if text is short enough - if len(text_nohtml) <= length: - return text - # else if un-tagged text is too long, cut it - return xml_escape(text_nohtml[:length] + u'...') - -fallback_safe_cut = safe_cut - - -try: - from lxml import etree -except (ImportError, AttributeError): - # gae environment: lxml not available - pass -else: - - def soup2xhtml(data, encoding): - """tidy (at least try) html soup and return the result - Note: the function considers a string with no surrounding tag as valid - if
<div>`data`</div> can be parsed by an XML parser - """ - # normalize line break - # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1 - data = u'\n'.join(data.splitlines()) - # XXX lxml 1.1 support still needed ? - xmltree = etree.HTML('
<div>%s</div>
' % data) - # NOTE: lxml 1.1 (etch platforms) doesn't recognize - # the encoding=unicode parameter (lxml 2.0 does), this is - # why we specify an encoding and re-decode to unicode later - body = etree.tostring(xmltree[0], encoding=encoding) - # remove <body><div> and </div></body> and decode to unicode - return body[11:-13].decode(encoding) - - if hasattr(etree.HTML('
<div>test</div>
    '), 'iter'): - - def safe_cut(text, length): - """returns an html document of length based on , - and cut is necessary. - """ - if text is None: - return u'' - dom = etree.HTML(text) - curlength = 0 - add_ellipsis = False - for element in dom.iter(): - if curlength >= length: - parent = element.getparent() - parent.remove(element) - if curlength == length and (element.text or element.tail): - add_ellipsis = True - else: - if element.text is not None: - element.text = cut(element.text, length - curlength) - curlength += len(element.text) - if element.tail is not None: - if curlength < length: - element.tail = cut(element.tail, length - curlength) - curlength += len(element.tail) - elif curlength == length: - element.tail = '...' - else: - element.tail = '' - text = etree.tounicode(dom[0])[6:-7] # remove wrapping - if add_ellipsis: - return text + u'...' - return text - -def text_cut(text, nbwords=30, gotoperiod=True): - """from the given plain text, return a text with at least words, - trying to go to the end of the current sentence. - - :param nbwords: the minimum number of words required - :param gotoperiod: specifies if the function should try to go to - the first period after the cut (i.e. finish - the sentence if possible) - - Note that spaces are normalized. - """ - if text is None: - return u'' - words = text.split() - text = u' '.join(words) # normalize spaces - textlength = minlength = len(' '.join(words[:nbwords])) - if gotoperiod: - textlength = text.find('.', minlength) + 1 - if textlength == 0: # no period found - textlength = minlength - return text[:textlength] - -def cut(text, length): - """returns a string of a maximum length based on - (approximatively, since if text has been cut, '...' is added to the end of the string, - resulting in a string of len + 3) - """ - if text is None: - return u'' - if len(text) <= length: - return text - # else if un-tagged text is too long, cut it - return text[:length] + u'...' - - - -# HTML generation helper functions ############################################ - -HTML4_EMPTY_TAGS = frozenset(('base', 'meta', 'link', 'hr', 'br', 'param', - 'img', 'area', 'input', 'col')) - -def sgml_attributes(attrs): - return u' '.join(u'%s="%s"' % (attr, xml_escape(unicode(value))) - for attr, value in sorted(attrs.items()) - if value is not None) - -def simple_sgml_tag(tag, content=None, escapecontent=True, **attrs): - """generation of a simple sgml tag (eg without children tags) easier - - content and attri butes will be escaped - """ - value = u'<%s' % tag - if attrs: - try: - attrs['class'] = attrs.pop('klass') - except KeyError: - pass - value += u' ' + sgml_attributes(attrs) - if content: - if escapecontent: - content = xml_escape(unicode(content)) - value += u'>%s' % (content, tag) - else: - if tag in HTML4_EMPTY_TAGS: - value += u' />' - else: - value += u'>' % tag - return value - -def tooltipize(text, tooltip, url=None): - """make an HTML tooltip""" - url = url or '#' - return u'
    %s' % (url, tooltip, text) - -def toggle_action(nodeid): - """builds a HTML link that uses the js toggleVisibility function""" - return u"javascript: toggleVisibility('%s')" % nodeid - -def toggle_link(nodeid, label): - """builds a HTML link that uses the js toggleVisibility function""" - return u'%s' % (toggle_action(nodeid), label) - - -def ureport_as_html(layout): - from logilab.common.ureports import HTMLWriter - formater = HTMLWriter(True) - stream = StringIO() #UStringIO() don't want unicode assertion - formater.format(layout, stream) - res = stream.getvalue() - if isinstance(res, str): - res = unicode(res, 'UTF8') - return res - -# traceback formatting ######################################################## - -import traceback - -def rest_traceback(info, exception): - """return a ReST formated traceback""" - res = [u'Traceback\n---------\n::\n'] - for stackentry in traceback.extract_tb(info[2]): - res.append(u'\tFile %s, line %s, function %s' % tuple(stackentry[:3])) - if stackentry[3]: - res.append(u'\t %s' % stackentry[3].decode('utf-8', 'replace')) - res.append(u'\n') - try: - res.append(u'\t Error: %s\n' % exception) - except: - pass - return u'\n'.join(res) - - -def html_traceback(info, exception, title='', - encoding='ISO-8859-1', body=''): - """ return an html formatted traceback from python exception infos. - """ - tcbk = info[2] - stacktb = traceback.extract_tb(tcbk) - strings = [] - if body: - strings.append(u'
    ') - # FIXME - strings.append(body) - strings.append(u'
    ') - if title: - strings.append(u'

    %s

    '% xml_escape(title)) - try: - strings.append(u'

    %s

    ' % xml_escape(str(exception)).replace("\n","
    ")) - except UnicodeError: - pass - strings.append(u'
    ') - for index, stackentry in enumerate(stacktb): - strings.append(u'File %s, line ' - u'%s, function ' - u'%s:
    '%( - xml_escape(stackentry[0]), stackentry[1], xml_escape(stackentry[2]))) - if stackentry[3]: - string = xml_escape(stackentry[3]).decode('utf-8', 'replace') - strings.append(u'  %s
    \n' % (string)) - # add locals info for each entry - try: - local_context = tcbk.tb_frame.f_locals - html_info = [] - chars = 0 - for name, value in local_context.iteritems(): - value = xml_escape(repr(value)) - info = u'%s=%s, ' % (name, value) - line_length = len(name) + len(value) - chars += line_length - # 150 is the result of *years* of research ;-) (CSS might be helpful here) - if chars > 150: - info = u'
    ' + info - chars = line_length - html_info.append(info) - boxid = 'ctxlevel%d' % index - strings.append(u'[%s]' % toggle_link(boxid, '+')) - strings.append(u'' % - (boxid, ''.join(html_info))) - tcbk = tcbk.tb_next - except Exception: - pass # doesn't really matter if we have no context info - strings.append(u'
    ') - return '\n'.join(strings) - -# csv files / unicode support ################################################# - -class UnicodeCSVWriter: - """proxies calls to csv.writer.writerow to be able to deal with unicode""" - - def __init__(self, wfunc, encoding, **kwargs): - self.writer = csv.writer(self, **kwargs) - self.wfunc = wfunc - self.encoding = encoding - - def write(self, data): - self.wfunc(data) - - def writerow(self, row): - csvrow = [] - for elt in row: - if isinstance(elt, unicode): - csvrow.append(elt.encode(self.encoding)) - else: - csvrow.append(str(elt)) - self.writer.writerow(csvrow) - - def writerows(self, rows): - for row in rows: - self.writerow(row) - - -# some decorators ############################################################# - -class limitsize(object): - def __init__(self, maxsize): - self.maxsize = maxsize - - def __call__(self, function): - def newfunc(*args, **kwargs): - ret = function(*args, **kwargs) - if isinstance(ret, basestring): - return ret[:self.maxsize] - return ret - return newfunc - - -def htmlescape(function): - def newfunc(*args, **kwargs): - ret = function(*args, **kwargs) - assert isinstance(ret, basestring) - return xml_escape(ret) - return newfunc +"""pre 3.6 bw compat""" +# pylint: disable-msg=W0614,W0401 +from warnings import warn +warn('moved to cubicweb.uilib', DeprecationWarning, stacklevel=2) +from cubicweb.uilib import * diff -r 15d541321a8c -r 74c1597f8a82 common/utils.py --- a/common/utils.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -"""pre 3.2 bw compat - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -# pylint: disable-msg=W0614,W0401 -from warnings import warn -warn('moved to cubicweb.utils', DeprecationWarning, stacklevel=2) -from cubicweb.utils import * diff -r 15d541321a8c -r 74c1597f8a82 common/view.py --- a/common/view.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -"""pre 3.2 bw compat - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -# pylint: disable-msg=W0614,W0401 -from warnings import warn -warn('moved to cubicweb.view', DeprecationWarning, stacklevel=2) -from cubicweb.view import * diff -r 15d541321a8c -r 74c1597f8a82 cwconfig.py --- a/cwconfig.py Wed Jan 20 10:13:02 2010 +0100 +++ b/cwconfig.py Wed Jan 20 10:13:45 2010 +0100 @@ -78,10 +78,11 @@ import sys import os import logging +import tempfile from smtplib import SMTP from threading import Lock from os.path import exists, join, expanduser, abspath, normpath, basename, isdir -import tempfile +from warnings import warn from logilab.common.decorators import cached from logilab.common.deprecation import deprecated @@ -480,14 +481,23 @@ (ctlfile, err)) cls.info('loaded cubicweb-ctl plugin %s', ctlfile) for cube in cls.available_cubes(): - pluginfile = join(cls.cube_dir(cube), 'ecplugin.py') + oldpluginfile = join(cls.cube_dir(cube), 'ecplugin.py') + pluginfile = join(cls.cube_dir(cube), 'ccplugin.py') initfile = join(cls.cube_dir(cube), '__init__.py') if exists(pluginfile): try: + __import__('cubes.%s.ccplugin' % cube) + cls.info('loaded cubicweb-ctl plugin from %s', cube) + except: + cls.exception('while loading plugin %s', pluginfile) + elif exists(oldpluginfile): + warn('[3.6] %s: ecplugin module should be renamed to ccplugin' % cube, + DeprecationWarning) + try: __import__('cubes.%s.ecplugin' % cube) cls.info('loaded cubicweb-ctl plugin from %s', cube) except: - cls.exception('while loading plugin %s', pluginfile) + cls.exception('while loading plugin %s', oldpluginfile) elif exists(initfile): try: __import__('cubes.%s' % cube) @@ -556,6 +566,7 @@ return vregpath def __init__(self): + register_stored_procedures() ConfigurationMixIn.__init__(self) self.adjust_sys_path() self.load_defaults() @@ -935,11 +946,11 @@ def migration_handler(self): """return a migration handler instance""" - from cubicweb.common.migration import MigrationHelper + from cubicweb.migration import MigrationHelper return MigrationHelper(self, verbosity=self.verbosity) def i18ncompile(self, langs=None): - from cubicweb.common import i18n + from cubicweb import i18n if langs is None: langs = self.available_languages() i18ndir = join(self.apphome, 'i18n') @@ -976,3 +987,53 @@ # alias to get a configuration instance from an instance id instance_configuration = CubicWebConfiguration.config_for application_configuration = deprecated('use instance_configuration')(instance_configuration) + + +_EXT_REGISTERED = False +def register_stored_procedures(): + from logilab.common.adbh import FunctionDescr + from rql.utils import register_function, iter_funcnode_variables + + global _EXT_REGISTERED + if _EXT_REGISTERED: + return + _EXT_REGISTERED = True + + class COMMA_JOIN(FunctionDescr): + supported_backends = ('postgres', 'sqlite',) + rtype = 'String' + + @classmethod + def st_description(cls, funcnode, mainindex, tr): + return ', '.join(sorted(term.get_description(mainindex, tr) + for term in iter_funcnode_variables(funcnode))) + + register_function(COMMA_JOIN) # XXX do not expose? 
+ + + class CONCAT_STRINGS(COMMA_JOIN): + aggregat = True + + register_function(CONCAT_STRINGS) # XXX bw compat + + class GROUP_CONCAT(CONCAT_STRINGS): + supported_backends = ('mysql', 'postgres', 'sqlite',) + + register_function(GROUP_CONCAT) + + + class LIMIT_SIZE(FunctionDescr): + supported_backends = ('postgres', 'sqlite',) + rtype = 'String' + + @classmethod + def st_description(cls, funcnode, mainindex, tr): + return funcnode.children[0].get_description(mainindex, tr) + + register_function(LIMIT_SIZE) + + + class TEXT_LIMIT_SIZE(LIMIT_SIZE): + supported_backends = ('mysql', 'postgres', 'sqlite',) + + register_function(TEXT_LIMIT_SIZE) diff -r 15d541321a8c -r 74c1597f8a82 cwctl.py --- a/cwctl.py Wed Jan 20 10:13:02 2010 +0100 +++ b/cwctl.py Wed Jan 20 10:13:45 2010 +0100 @@ -17,9 +17,9 @@ from logilab.common.clcommands import register_commands, pop_arg from logilab.common.shellutils import ASK -from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage, underline_title +from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage from cubicweb.cwconfig import CubicWebConfiguration as cwcfg, CWDEV, CONFIGURATIONS -from cubicweb.toolsutils import Command, main_run, rm, create_dir +from cubicweb.toolsutils import Command, main_run, rm, create_dir, underline_title def wait_process_end(pid, maxtry=10, waittime=1): """wait for a process to actually die""" @@ -312,7 +312,7 @@ # handle i18n files structure # in the first cube given print '-> preparing i18n catalogs' - from cubicweb.common import i18n + from cubicweb import i18n langs = [lang for lang, _ in i18n.available_catalogs(join(templdirs[0], 'i18n'))] errors = config.i18ncompile(langs) if errors: @@ -690,7 +690,7 @@ # * install new languages # * recompile catalogs # in the first componant given - from cubicweb.common import i18n + from cubicweb import i18n templdir = cwcfg.cube_dir(config.cubes()[0]) langs = [lang for lang, _ in i18n.available_catalogs(join(templdir, 'i18n'))] errors = config.i18ncompile(langs) diff -r 15d541321a8c -r 74c1597f8a82 cwvreg.py --- a/cwvreg.py Wed Jan 20 10:13:02 2010 +0100 +++ b/cwvreg.py Wed Jan 20 10:13:45 2010 +0100 @@ -18,7 +18,7 @@ ObjectNotFound, NoSelectableObject, RegistryNotFound, RegistryOutOfDate, CW_EVENT_MANAGER, onevent) from cubicweb.utils import dump_class -from cubicweb.vregistry import VRegistry, Registry +from cubicweb.vregistry import VRegistry, Registry, class_regid from cubicweb.rtags import RTAGS @@ -59,12 +59,9 @@ return self.vreg.schema def initialization_completed(self): - # call vreg_initialization_completed on appobjects and print - # registry content - for appobjects in self.itervalues(): - for appobject in appobjects: - appobject.vreg_initialization_completed() + pass + @deprecated('[3.6] select object, then use obj.render()') def render(self, __oid, req, __fallback_oid=None, rset=None, initargs=None, **kwargs): """Select object with the given id (`__oid`) then render it. 
If the @@ -89,20 +86,22 @@ obj = self.select(__fallback_oid, req, rset=rset, **initargs) return obj.render(**kwargs) + @deprecated('[3.6] use select_or_none and test for obj.cw_propval("visible")') def select_vobject(self, oid, *args, **kwargs): - selected = self.select_object(oid, *args, **kwargs) - if selected and selected.propval('visible'): + selected = self.select_or_none(oid, *args, **kwargs) + if selected and selected.cw_propval('visible'): return selected return None - def possible_vobjects(self, *args, **kwargs): + def poss_visible_objects(self, *args, **kwargs): """return an ordered list of possible app objects in a given registry, supposing they support the 'visible' and 'order' properties (as most visualizable objects) """ return sorted([x for x in self.possible_objects(*args, **kwargs) - if x.propval('visible')], - key=lambda x: x.propval('order')) + if x.cw_propval('visible')], + key=lambda x: x.cw_propval('order')) + possible_vobjects = deprecated('[3.6] use poss_visible_objects()')(poss_visible_objects) VRegistry.REGISTRY_FACTORY[None] = CWRegistry @@ -118,15 +117,24 @@ clear_cache(self, 'etype_class') def register(self, obj, **kwargs): - oid = kwargs.get('oid') or obj.id + oid = kwargs.get('oid') or class_regid(obj) if oid != 'Any' and not oid in self.schema: self.error('don\'t register %s, %s type not defined in the ' - 'schema', obj, obj.id) + 'schema', obj, oid) return kwargs['clear'] = True super(ETypeRegistry, self).register(obj, **kwargs) @cached + def parent_classes(self, etype): + if etype == 'Any': + return [self.etype_class('Any')] + eschema = self.schema.eschema(etype) + parents = [self.etype_class(e.type) for e in eschema.ancestors()] + parents.append(self.etype_class('Any')) + return parents + + @cached def etype_class(self, etype): """return an entity class for the given entity type. 
@@ -137,42 +145,42 @@ """ etype = str(etype) if etype == 'Any': - return self.select('Any', 'Any') + objects = self['Any'] + assert len(objects) == 1, objects + return objects[0] eschema = self.schema.eschema(etype) baseschemas = [eschema] + eschema.ancestors() # browse ancestors from most specific to most generic and try to find an # associated custom entity class - cls = None for baseschema in baseschemas: try: btype = ETYPE_NAME_MAP[baseschema] except KeyError: btype = str(baseschema) - if cls is None: - try: - objects = self[btype] - assert len(objects) == 1, objects - if btype == etype: - cls = objects[0] - else: - cls = self.etype_class(btype) - except ObjectNotFound: - continue - else: - # ensure parent classes are built first - self.etype_class(btype) - if cls is None: + try: + objects = self[btype] + assert len(objects) == 1, objects + if btype == etype: + cls = objects[0] + else: + # recurse to ensure issubclass(etype_class('Child'), + # etype_class('Parent')) + cls = self.etype_class(btype) + break + except ObjectNotFound: + pass + else: # no entity class for any of the ancestors, fallback to the default # one objects = self['Any'] assert len(objects) == 1, objects cls = objects[0] - # make a copy event if cls.id == etype, else we may have pb for client - # application using multiple connections to different repositories (eg - # shingouz) + # make a copy event if cls.__regid__ == etype, else we may have pb for + # client application using multiple connections to different + # repositories (eg shingouz) cls = dump_class(cls, etype) - cls.id = etype - cls.__initialize__() + cls.__regid__ = etype + cls.__initialize__(self.schema) return cls VRegistry.REGISTRY_FACTORY['etypes'] = ETypeRegistry @@ -180,12 +188,13 @@ class ViewsRegistry(CWRegistry): - def main_template(self, req, oid='main-template', **kwargs): + def main_template(self, req, oid='main-template', rset=None, **kwargs): """display query by calling the given template (default to main), and returning the output as a string instead of requiring the [w]rite method as argument """ - res = self.render(oid, req, **kwargs) + obj = self.select(oid, req, rset=rset, **kwargs) + res = obj.render(**kwargs) if isinstance(res, unicode): return res.encode(req.encoding) assert isinstance(res, str) @@ -200,7 +209,7 @@ if vid[0] == '_': continue try: - view = self.select_best(views, req, rset=rset, **kwargs) + view = self._select_best(views, req, rset=rset, **kwargs) if view.linkable(): yield view except NoSelectableObject: @@ -216,7 +225,7 @@ def possible_actions(self, req, rset=None, **kwargs): if rset is None: - actions = self.possible_vobjects(req, rset=rset, **kwargs) + actions = self.poss_visible_objects(req, rset=rset, **kwargs) else: actions = rset.possible_actions(**kwargs) # cached implementation result = {} diff -r 15d541321a8c -r 74c1597f8a82 dbapi.py --- a/dbapi.py Wed Jan 20 10:13:02 2010 +0100 +++ b/dbapi.py Wed Jan 20 10:13:45 2010 +0100 @@ -19,8 +19,9 @@ from logilab.common.decorators import monkeypatch from logilab.common.deprecation import deprecated -from cubicweb import ETYPE_NAME_MAP, ConnectionError, RequestSessionMixIn -from cubicweb import cwvreg, cwconfig +from cubicweb import ETYPE_NAME_MAP, ConnectionError, cwvreg, cwconfig +from cubicweb.req import RequestSessionBase + _MARKER = object() @@ -42,9 +43,9 @@ registries. 
""" defaultcls = cwvreg.VRegistry.REGISTRY_FACTORY[None] - orig_select_best = defaultcls.orig_select_best = defaultcls.select_best + orig_select_best = defaultcls.orig_select_best = defaultcls._select_best @monkeypatch(defaultcls) - def select_best(self, appobjects, *args, **kwargs): + def _select_best(self, appobjects, *args, **kwargs): """return an instance of the most specific object according to parameters @@ -110,20 +111,21 @@ except Exception, ex: raise ConnectionError(str(ex)) -def repo_connect(repo, login, password, cnxprops=None): +def repo_connect(repo, login, **kwargs): """Constructor to create a new connection to the CubicWeb repository. Returns a Connection instance. """ - cnxprops = cnxprops or ConnectionProperties('inmemory') - cnxid = repo.connect(unicode(login), password, cnxprops=cnxprops) - cnx = Connection(repo, cnxid, cnxprops) - if cnxprops.cnxtype == 'inmemory': + if not 'cnxprops' in kwargs: + kwargs['cnxprops'] = ConnectionProperties('inmemory') + cnxid = repo.connect(unicode(login), **kwargs) + cnx = Connection(repo, cnxid, kwargs['cnxprops']) + if kwargs['cnxprops'].cnxtype == 'inmemory': cnx.vreg = repo.vreg return cnx -def connect(database=None, login=None, password=None, host=None, group=None, - cnxprops=None, setvreg=True, mulcnx=True, initlog=True): +def connect(database=None, login=None, host=None, group=None, + cnxprops=None, setvreg=True, mulcnx=True, initlog=True, **kwargs): """Constructor for creating a connection to the CubicWeb repository. Returns a Connection object. @@ -153,11 +155,11 @@ vreg.set_schema(schema) else: vreg = None - cnx = repo_connect(repo, login, password, cnxprops) + cnx = repo_connect(repo, login, cnxprops=cnxprops, **kwargs) cnx.vreg = vreg return cnx -def in_memory_cnx(config, login, password): +def in_memory_cnx(config, login, **kwargs): """usefull method for testing and scripting to get a dbapi.Connection object connected to an in-memory repository instance """ @@ -170,11 +172,11 @@ repo = get_repository('inmemory', config=config, vreg=vreg) # connection to the CubicWeb repository cnxprops = ConnectionProperties('inmemory') - cnx = repo_connect(repo, login, password, cnxprops=cnxprops) + cnx = repo_connect(repo, login, cnxprops=cnxprops, **kwargs) return repo, cnx -class DBAPIRequest(RequestSessionMixIn): +class DBAPIRequest(RequestSessionBase): def __init__(self, vreg, cnx=None): super(DBAPIRequest, self).__init__(vreg) diff -r 15d541321a8c -r 74c1597f8a82 debian.hardy/rules --- a/debian.hardy/rules Wed Jan 20 10:13:02 2010 +0100 +++ b/debian.hardy/rules Wed Jan 20 10:13:45 2010 +0100 @@ -33,7 +33,6 @@ dh_clean dh_installdirs - #python setup.py install_lib --no-compile --install-dir=debian/cubicweb-common/usr/lib/python2.4/site-packages/ python setup.py -q install --no-compile --prefix=debian/tmp/usr # Put all the python library and data in cubicweb-common @@ -51,7 +50,6 @@ rm -rf debian/cubicweb-web/usr/lib/${PY_VERSION}/site-packages/cubicweb/web/test rm -rf debian/cubicweb-twisted/usr/lib/${PY_VERSION}/site-packages/cubicweb/etwist/test rm -rf debian/cubicweb-common/usr/lib/${PY_VERSION}/site-packages/cubicweb/ext/test - rm -rf debian/cubicweb-common/usr/lib/${PY_VERSION}/site-packages/cubicweb/common/test rm -rf debian/cubicweb-common/usr/lib/${PY_VERSION}/site-packages/cubicweb/entities/test # cubes directory must be managed as a valid python module diff -r 15d541321a8c -r 74c1597f8a82 debian/cubicweb-dev.install.in --- a/debian/cubicweb-dev.install.in Wed Jan 20 10:13:02 2010 +0100 +++ b/debian/cubicweb-dev.install.in 
Wed Jan 20 10:13:45 2010 +0100 @@ -6,6 +6,7 @@ debian/tmp/usr/lib/PY_VERSION/site-packages/cubicweb/ext/test usr/lib/PY_VERSION/site-packages/cubicweb/ext/ debian/tmp/usr/lib/PY_VERSION/site-packages/cubicweb/server/test usr/lib/PY_VERSION/site-packages/cubicweb/server/ debian/tmp/usr/lib/PY_VERSION/site-packages/cubicweb/sobjects/test usr/lib/PY_VERSION/site-packages/cubicweb/sobjects/ +debian/tmp/usr/lib/PY_VERSION/site-packages/cubicweb/hooks/test usr/lib/PY_VERSION/site-packages/cubicweb/sobjects/ debian/tmp/usr/lib/PY_VERSION/site-packages/cubicweb/web/test usr/lib/PY_VERSION/site-packages/cubicweb/web/ debian/tmp/usr/lib/PY_VERSION/site-packages/cubicweb/etwist/test usr/lib/PY_VERSION/site-packages/cubicweb/etwist/ debian/tmp/usr/lib/PY_VERSION/site-packages/cubicweb/goa/test usr/lib/PY_VERSION/site-packages/cubicweb/goa/ diff -r 15d541321a8c -r 74c1597f8a82 debian/cubicweb-server.install.in --- a/debian/cubicweb-server.install.in Wed Jan 20 10:13:02 2010 +0100 +++ b/debian/cubicweb-server.install.in Wed Jan 20 10:13:45 2010 +0100 @@ -1,4 +1,5 @@ debian/tmp/usr/lib/PY_VERSION/site-packages/cubicweb/server/ usr/lib/PY_VERSION/site-packages/cubicweb +debian/tmp/usr/lib/PY_VERSION/site-packages/cubicweb/hooks/ usr/lib/PY_VERSION/site-packages/cubicweb debian/tmp/usr/lib/PY_VERSION/site-packages/cubicweb/sobjects/ usr/lib/PY_VERSION/site-packages/cubicweb debian/tmp/usr/lib/PY_VERSION/site-packages/cubicweb/schemas/ usr/lib/PY_VERSION/site-packages/cubicweb debian/tmp/usr/share/cubicweb/migration/ usr/share/cubicweb/ diff -r 15d541321a8c -r 74c1597f8a82 debian/rules --- a/debian/rules Wed Jan 20 10:13:02 2010 +0100 +++ b/debian/rules Wed Jan 20 10:13:45 2010 +0100 @@ -48,6 +48,7 @@ # Remove unittests directory (should be available in cubicweb-dev only) rm -rf debian/cubicweb-server/usr/lib/${PY_VERSION}/site-packages/cubicweb/server/test + rm -rf debian/cubicweb-server/usr/lib/${PY_VERSION}/site-packages/cubicweb/hooks/test rm -rf debian/cubicweb-server/usr/lib/${PY_VERSION}/site-packages/cubicweb/sobjects/test rm -rf debian/cubicweb-web/usr/lib/${PY_VERSION}/site-packages/cubicweb/web/test rm -rf debian/cubicweb-twisted/usr/lib/${PY_VERSION}/site-packages/cubicweb/etwist/test diff -r 15d541321a8c -r 74c1597f8a82 devtools/__init__.py --- a/devtools/__init__.py Wed Jan 20 10:13:02 2010 +0100 +++ b/devtools/__init__.py Wed Jan 20 10:13:45 2010 +0100 @@ -13,20 +13,55 @@ from os.path import (abspath, join, exists, basename, dirname, normpath, split, isfile, isabs) -from cubicweb import CW_SOFTWARE_ROOT, ConfigurationError +from cubicweb import CW_SOFTWARE_ROOT, ConfigurationError, schema, cwconfig from cubicweb.utils import strptime -from cubicweb.toolsutils import read_config -from cubicweb.cwconfig import CubicWebConfiguration, merge_options from cubicweb.server.serverconfig import ServerConfiguration from cubicweb.etwist.twconfig import TwistedConfiguration +cwconfig.CubicWebConfiguration.cls_adjust_sys_path() + +# db auto-population configuration ############################################# + +SYSTEM_ENTITIES = schema.SCHEMA_TYPES | set(( + 'CWGroup', 'CWUser', 'CWProperty', + 'Workflow', 'State', 'BaseTransition', 'Transition', 'WorkflowTransition', + 'TrInfo', 'SubWorkflowExitPoint', + )) + +SYSTEM_RELATIONS = schema.META_RTYPES | set(( + # workflow related + 'workflow_of', 'state_of', 'transition_of', 'initial_state', 'allowed_transition', + 'destination_state', 'in_state', 'wf_info_for', 'from_state', 'to_state', + 'condition', 'subworkflow', 'subworkflow_state', 'subworkflow_exit', 
+ # cwproperty + 'for_user', + # schema definition + 'specializes', + 'relation_type', 'from_entity', 'to_entity', + 'constrained_by', 'cstrtype', 'widget', + 'read_permission', 'update_permission', 'delete_permission', 'add_permission', + # permission + 'in_group', 'require_group', 'require_permission', + # deducted from other relations + 'primary_email', + )) + +# content validation configuration ############################################# + # validators are used to validate (XML, DTD, whatever) view's content # validators availables are : # 'dtd' : validates XML + declared DTD # 'xml' : guarantees XML is well formed # None : do not try to validate anything + +# {'vid': validator} VIEW_VALIDATORS = {} + + +# cubicweb test configuration ################################################## + BASE_URL = 'http://testing.fr/cubicweb/' + DEFAULT_SOURCES = {'system': {'adapter' : 'native', 'db-encoding' : 'UTF-8', #'ISO-8859-1', 'db-user' : u'admin', @@ -40,13 +75,14 @@ }, } + class TestServerConfiguration(ServerConfiguration): mode = 'test' set_language = False read_instance_schema = False bootstrap_schema = False init_repository = True - options = merge_options(ServerConfiguration.options + ( + options = cwconfig.merge_options(ServerConfiguration.options + ( ('anonymous-user', {'type' : 'string', 'default': None, @@ -66,10 +102,11 @@ def __init__(self, appid, log_threshold=logging.CRITICAL+10): ServerConfiguration.__init__(self, appid) - self.global_set_option('log-file', None) self.init_log(log_threshold, force=True) # need this, usually triggered by cubicweb-ctl self.load_cwctl_plugins() + self.global_set_option('anonymous-user', 'anon') + self.global_set_option('anonymous-password', 'anon') anonymous_user = TwistedConfiguration.anonymous_user.im_func @@ -81,6 +118,11 @@ return abspath('..') appdatahome = apphome + def load_configuration(self): + super(TestServerConfiguration, self).load_configuration() + self.global_set_option('anonymous-user', 'anon') + self.global_set_option('anonymous-password', 'anon') + def main_config_file(self): """return instance's control configuration file""" return join(self.apphome, '%s.conf' % self.name) @@ -118,30 +160,11 @@ sources = DEFAULT_SOURCES return sources - def load_defaults(self): - super(TestServerConfiguration, self).load_defaults() - # note: don't call global set option here, OptionManager may not yet be initialized - # add anonymous user - self.set_option('anonymous-user', 'anon') - self.set_option('anonymous-password', 'anon') - # uncomment the line below if you want rql queries to be logged - #self.set_option('query-log-file', '/tmp/test_rql_log.' 
+ `os.getpid()`) - self.set_option('sender-name', 'cubicweb-test') - self.set_option('sender-addr', 'cubicweb-test@logilab.fr') - try: - send_to = '%s@logilab.fr' % os.getlogin() - except (OSError, AttributeError): - send_to = '%s@logilab.fr' % (os.environ.get('USER') - or os.environ.get('USERNAME') - or os.environ.get('LOGNAME')) - self.set_option('sender-addr', send_to) - self.set_option('default-dest-addrs', send_to) - self.set_option('base-url', BASE_URL) - class BaseApptestConfiguration(TestServerConfiguration, TwistedConfiguration): repo_method = 'inmemory' - options = merge_options(TestServerConfiguration.options + TwistedConfiguration.options) + options = cwconfig.merge_options(TestServerConfiguration.options + + TwistedConfiguration.options) cubicweb_appobject_path = TestServerConfiguration.cubicweb_appobject_path | TwistedConfiguration.cubicweb_appobject_path cube_appobject_path = TestServerConfiguration.cube_appobject_path | TwistedConfiguration.cube_appobject_path @@ -163,98 +186,84 @@ BaseApptestConfiguration.__init__(self, appid, log_threshold=log_threshold) self.init_repository = sourcefile is None self.sourcefile = sourcefile - import re - self.global_set_option('embed-allowed', re.compile('.*')) -class RealDatabaseConfiguration(ApptestConfiguration): - init_repository = False - sourcesdef = {'system': {'adapter' : 'native', - 'db-encoding' : 'UTF-8', #'ISO-8859-1', - 'db-user' : u'admin', - 'db-password' : 'gingkow', - 'db-name' : 'seotest', - 'db-driver' : 'postgres', - 'db-host' : None, - }, - 'admin' : {'login': u'admin', - 'password': u'gingkow', - }, - } +# test database handling ####################################################### - def __init__(self, appid, log_threshold=logging.CRITICAL, sourcefile=None): - ApptestConfiguration.__init__(self, appid) - self.init_repository = False +def init_test_database(config=None, configdir='data'): + """init a test database for a specific driver""" + from cubicweb.dbapi import in_memory_cnx + config = config or TestServerConfiguration(configdir) + sources = config.sources() + driver = sources['system']['db-driver'] + if driver == 'sqlite': + init_test_database_sqlite(config) + elif driver == 'postgres': + init_test_database_postgres(config) + else: + raise ValueError('no initialization function for driver %r' % driver) + config._cubes = None # avoid assertion error + repo, cnx = in_memory_cnx(config, unicode(sources['admin']['login']), + password=sources['admin']['password'] or 'xxx') + if driver == 'sqlite': + install_sqlite_patch(repo.querier) + return repo, cnx - def sources(self): - """ - By default, we run tests with the sqlite DB backend. - One may use its own configuration by just creating a - 'sources' file in the test directory from wich tests are - launched. 
- """ - self._sources = self.sourcesdef - return self._sources +def reset_test_database(config): + """init a test database for a specific driver""" + driver = config.sources()['system']['db-driver'] + if driver == 'sqlite': + reset_test_database_sqlite(config) + else: + raise ValueError('no reset function for driver %r' % driver) -def buildconfig(dbuser, dbpassword, dbname, adminuser, adminpassword, dbhost=None): - """convenience function that builds a real-db configuration class""" - sourcesdef = {'system': {'adapter' : 'native', - 'db-encoding' : 'UTF-8', #'ISO-8859-1', - 'db-user' : dbuser, - 'db-password' : dbpassword, - 'db-name' : dbname, - 'db-driver' : 'postgres', - 'db-host' : dbhost, - }, - 'admin' : {'login': adminuser, - 'password': adminpassword, - }, - } - return type('MyRealDBConfig', (RealDatabaseConfiguration,), - {'sourcesdef': sourcesdef}) +### postgres test database handling ############################################ -def loadconfig(filename): - """convenience function that builds a real-db configuration class - from a file - """ - return type('MyRealDBConfig', (RealDatabaseConfiguration,), - {'sourcesdef': read_config(filename)}) +def init_test_database_postgres(config): + """initialize a fresh sqlite databse used for testing purpose""" + if config.init_repository: + from cubicweb.server import init_repository + init_repository(config, interactive=False, drop=True) -class LivetestConfiguration(BaseApptestConfiguration): - init_repository = False +### sqlite test database handling ############################################## + +def cleanup_sqlite(dbfile, removetemplate=False): + try: + os.remove(dbfile) + os.remove('%s-journal' % dbfile) + except OSError: + pass + if removetemplate: + try: + os.remove('%s-template' % dbfile) + except OSError: + pass - def __init__(self, cube=None, sourcefile=None, pyro_name=None, - log_threshold=logging.CRITICAL): - TestServerConfiguration.__init__(self, cube, log_threshold=log_threshold) - self.appid = pyro_name or cube - # don't change this, else some symlink problems may arise in some - # environment (e.g. 
mine (syt) ;o) - # XXX I'm afraid this test will prevent to run test from a production - # environment - self._sources = None - # instance cube test - if cube is not None: - self.apphome = self.cube_dir(cube) - elif 'web' in os.getcwd().split(os.sep): - # web test - self.apphome = join(normpath(join(dirname(__file__), '..')), 'web') - else: - # cube test - self.apphome = abspath('..') - self.sourcefile = sourcefile - self.global_set_option('realm', '') - self.use_pyro = pyro_name is not None +def reset_test_database_sqlite(config): + import shutil + dbfile = config.sources()['system']['db-name'] + cleanup_sqlite(dbfile) + template = '%s-template' % dbfile + if exists(template): + shutil.copy(template, dbfile) + return True + return False - def pyro_enabled(self): - if self.use_pyro: - return True - else: - return False +def init_test_database_sqlite(config): + """initialize a fresh sqlite databse used for testing purpose""" + # remove database file if it exists + dbfile = config.sources()['system']['db-name'] + if not reset_test_database_sqlite(config): + # initialize the database + import shutil + from cubicweb.server import init_repository + init_repository(config, interactive=False) + dbfile = config.sources()['system']['db-name'] + shutil.copy(dbfile, '%s-template' % dbfile) -CubicWebConfiguration.cls_adjust_sys_path() def install_sqlite_patch(querier): """This patch hotfixes the following sqlite bug : @@ -293,58 +302,3 @@ return new_execute querier.__class__.execute = wrap_execute(querier.__class__.execute) querier.__class__._devtools_sqlite_patched = True - -def init_test_database(driver='sqlite', configdir='data', config=None, - vreg=None): - """init a test database for a specific driver""" - from cubicweb.dbapi import in_memory_cnx - if vreg and not config: - config = vreg.config - config = config or TestServerConfiguration(configdir) - source = config.sources() - if driver == 'sqlite': - init_test_database_sqlite(config, source) - elif driver == 'postgres': - init_test_database_postgres(config, source) - else: - raise ValueError('no initialization function for driver %r' % driver) - config._cubes = None # avoid assertion error - repo, cnx = in_memory_cnx(vreg or config, unicode(source['admin']['login']), - source['admin']['password'] or 'xxx') - if driver == 'sqlite': - install_sqlite_patch(repo.querier) - return repo, cnx - -def init_test_database_postgres(config, source, vreg=None): - """initialize a fresh sqlite databse used for testing purpose""" - if config.init_repository: - from cubicweb.server import init_repository - init_repository(config, interactive=False, drop=True, vreg=vreg) - -def cleanup_sqlite(dbfile, removecube=False): - try: - os.remove(dbfile) - os.remove('%s-journal' % dbfile) - except OSError: - pass - if removecube: - try: - os.remove('%s-template' % dbfile) - except OSError: - pass - -def init_test_database_sqlite(config, source, vreg=None): - """initialize a fresh sqlite databse used for testing purpose""" - import shutil - # remove database file if it exists (actually I know driver == 'sqlite' :) - dbfile = source['system']['db-name'] - source['system']['db-name'] = os.path.abspath(dbfile) - cleanup_sqlite(dbfile) - template = '%s-template' % dbfile - if exists(template): - shutil.copy(template, dbfile) - else: - # initialize the database - from cubicweb.server import init_repository - init_repository(config, interactive=False, vreg=vreg) - shutil.copy(dbfile, template) diff -r 15d541321a8c -r 74c1597f8a82 devtools/_apptest.py --- a/devtools/_apptest.py 
Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,207 +0,0 @@ -"""Hidden internals for the devtools.apptest module - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -import sys, traceback - -from logilab.common.pytest import pause_tracing, resume_tracing - -import yams.schema - -from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError -from cubicweb.cwvreg import CubicWebVRegistry - -from cubicweb.web.application import CubicWebPublisher -from cubicweb.web import Redirect - -from cubicweb.devtools import ApptestConfiguration, init_test_database -from cubicweb.devtools.fake import FakeRequest - -SYSTEM_ENTITIES = ('CWGroup', 'CWUser', - 'CWAttribute', 'CWRelation', - 'CWConstraint', 'CWConstraintType', 'CWProperty', - 'CWEType', 'CWRType', - 'Workflow', 'State', 'BaseTransition', 'Transition', 'WorkflowTransition', 'TrInfo', 'SubWorkflowExitPoint', - 'RQLExpression', - ) -SYSTEM_RELATIONS = ( - # virtual relation - 'identity', - # metadata - 'is', 'is_instance_of', 'owned_by', 'created_by', 'specializes', - # workflow related - 'workflow_of', 'state_of', 'transition_of', 'initial_state', 'allowed_transition', - 'destination_state', 'in_state', 'wf_info_for', 'from_state', 'to_state', - 'condition', 'subworkflow', 'subworkflow_state', 'subworkflow_exit', - # permission - 'in_group', 'require_group', 'require_permission', - 'read_permission', 'update_permission', 'delete_permission', 'add_permission', - # eproperty - 'for_user', - # schema definition - 'relation_type', 'from_entity', 'to_entity', - 'constrained_by', 'cstrtype', 'widget', - # deducted from other relations - 'primary_email', - ) - -def unprotected_entities(app_schema, strict=False): - """returned a Set of each non final entity type, excluding CWGroup, and CWUser... - """ - if strict: - protected_entities = yams.schema.BASE_TYPES - else: - protected_entities = yams.schema.BASE_TYPES.union(set(SYSTEM_ENTITIES)) - entities = set(app_schema.entities()) - return entities - protected_entities - - -def ignore_relations(*relations): - global SYSTEM_RELATIONS - SYSTEM_RELATIONS += relations - - -class TestEnvironment(object): - """TestEnvironment defines a context (e.g. a config + a given connection) in - which the tests are executed - """ - - def __init__(self, appid, reporter=None, verbose=False, - configcls=ApptestConfiguration, requestcls=FakeRequest): - config = configcls(appid) - self.requestcls = requestcls - self.cnx = None - config.db_perms = False - source = config.sources()['system'] - if verbose: - print "init test database ..." 
- self.vreg = vreg = CubicWebVRegistry(config) - self.admlogin = source['db-user'] - # restore database <=> init database - self.restore_database() - if verbose: - print "init done" - config.repository = lambda x=None: self.repo - self.app = CubicWebPublisher(config, vreg=vreg) - self.verbose = verbose - schema = self.vreg.schema - # else we may run into problems since email address are ususally share in app tests - # XXX should not be necessary anymore - schema.rschema('primary_email').set_rproperty('CWUser', 'EmailAddress', 'composite', False) - self.deletable_entities = unprotected_entities(schema) - - def restore_database(self): - """called by unittests' tearDown to restore the original database - """ - try: - pause_tracing() - if self.cnx: - self.cnx.close() - source = self.vreg.config.sources()['system'] - self.repo, self.cnx = init_test_database(driver=source['db-driver'], - vreg=self.vreg) - self._orig_cnx = self.cnx - resume_tracing() - except: - resume_tracing() - traceback.print_exc() - sys.exit(1) - # XXX cnx decoration is usually done by the repository authentication manager, - # necessary in authentication tests - self.cnx.vreg = self.vreg - self.cnx.login = source['db-user'] - self.cnx.password = source['db-password'] - - - def create_user(self, login, groups=('users',), req=None): - req = req or self.create_request() - cursor = self._orig_cnx.cursor(req) - rset = cursor.execute('INSERT CWUser X: X login %(login)s, X upassword %(passwd)s', - {'login': unicode(login), 'passwd': login.encode('utf8')}) - user = rset.get_entity(0, 0) - cursor.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' - % ','.join(repr(g) for g in groups), - {'x': user.eid}, 'x') - user.clear_related_cache('in_group', 'subject') - self._orig_cnx.commit() - return user - - def login(self, login, password=None): - if login == self.admlogin: - self.restore_connection() - else: - self.cnx = repo_connect(self.repo, unicode(login), - password or str(login), - ConnectionProperties('inmemory')) - if login == self.vreg.config.anonymous_user()[0]: - self.cnx.anonymous_connection = True - return self.cnx - - def restore_connection(self): - if not self.cnx is self._orig_cnx: - try: - self.cnx.close() - except ProgrammingError: - pass # already closed - self.cnx = self._orig_cnx - - ############################################################################ - - def execute(self, rql, args=None, eidkey=None, req=None): - """executes , builds a resultset, and returns a couple (rset, req) - where req is a FakeRequest - """ - req = req or self.create_request(rql=rql) - return self.cnx.cursor(req).execute(unicode(rql), args, eidkey) - - def create_request(self, rql=None, **kwargs): - """executes , builds a resultset, and returns a - couple (rset, req) where req is a FakeRequest - """ - if rql: - kwargs['rql'] = rql - req = self.requestcls(self.vreg, form=kwargs) - req.set_connection(self.cnx) - return req - - def get_rset_and_req(self, rql, optional_args=None, args=None, eidkey=None): - """executes , builds a resultset, and returns a - couple (rset, req) where req is a FakeRequest - """ - return (self.execute(rql, args, eidkey), - self.create_request(rql=rql, **optional_args or {})) - - -class ExistingTestEnvironment(TestEnvironment): - - def __init__(self, appid, sourcefile, verbose=False): - config = ApptestConfiguration(appid, sourcefile=sourcefile) - if verbose: - print "init test database ..." 
- source = config.sources()['system'] - self.vreg = CubicWebVRegistry(config) - self.cnx = init_test_database(driver=source['db-driver'], - vreg=self.vreg)[1] - if verbose: - print "init done" - self.app = CubicWebPublisher(config, vreg=self.vreg) - self.verbose = verbose - # this is done when the publisher is opening a connection - self.cnx.vreg = self.vreg - - def setup(self, config=None): - """config is passed by TestSuite but is ignored in this environment""" - cursor = self.cnx.cursor() - self.last_eid = cursor.execute('Any X WHERE X creation_date D ORDERBY D DESC LIMIT 1').rows[0][0] - - def cleanup(self): - """cancel inserted elements during tests""" - cursor = self.cnx.cursor() - cursor.execute('DELETE Any X WHERE X eid > %(x)s', {'x' : self.last_eid}, eid_key='x') - print "cleaning done" - self.cnx.commit() diff -r 15d541321a8c -r 74c1597f8a82 devtools/apptest.py --- a/devtools/apptest.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,522 +0,0 @@ -"""This module provides misc utilities to test instances - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -from copy import deepcopy - -import simplejson - -from logilab.common.testlib import TestCase -from logilab.common.pytest import nocoverage -from logilab.common.umessage import message_from_string - -from logilab.common.deprecation import deprecated - -from cubicweb.devtools import init_test_database, TestServerConfiguration, ApptestConfiguration -from cubicweb.devtools._apptest import TestEnvironment -from cubicweb.devtools.fake import FakeRequest - -from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError - - -MAILBOX = [] -class Email: - def __init__(self, recipients, msg): - self.recipients = recipients - self.msg = msg - - @property - def message(self): - return message_from_string(self.msg) - - @property - def subject(self): - return self.message.get('Subject') - - @property - def content(self): - return self.message.get_payload(decode=True) - - def __repr__(self): - return '' % (','.join(self.recipients), - self.message.get('Subject')) - -class MockSMTP: - def __init__(self, server, port): - pass - def close(self): - pass - def sendmail(self, helo_addr, recipients, msg): - MAILBOX.append(Email(recipients, msg)) - -from cubicweb import cwconfig -cwconfig.SMTP = MockSMTP - - -def get_versions(self, checkversions=False): - """return the a dictionary containing cubes used by this instance - as key with their version as value, including cubicweb version. This is a - public method, not requiring a session id. 
- - replace Repository.get_versions by this method if you don't want versions - checking - """ - vcconf = {'cubicweb': self.config.cubicweb_version()} - self.config.bootstrap_cubes() - for pk in self.config.cubes(): - version = self.config.cube_version(pk) - vcconf[pk] = version - self.config._cubes = None - return vcconf - - -@property -def late_binding_env(self): - """builds TestEnvironment as late as possible""" - if not hasattr(self, '_env'): - self.__class__._env = TestEnvironment('data', configcls=self.configcls, - requestcls=self.requestcls) - return self._env - - -class autoenv(type): - """automatically set environment on EnvBasedTC subclasses if necessary - """ - def __new__(mcs, name, bases, classdict): - env = classdict.get('env') - # try to find env in one of the base classes - if env is None: - for base in bases: - env = getattr(base, 'env', None) - if env is not None: - classdict['env'] = env - break - if not classdict.get('__abstract__') and not classdict.get('env'): - classdict['env'] = late_binding_env - return super(autoenv, mcs).__new__(mcs, name, bases, classdict) - - -class EnvBasedTC(TestCase): - """abstract class for test using an apptest environment - """ - __metaclass__ = autoenv - __abstract__ = True - env = None - configcls = ApptestConfiguration - requestcls = FakeRequest - - # user / session management ############################################### - - def user(self, req=None): - if req is None: - req = self.env.create_request() - return self.env.cnx.user(req) - else: - return req.user - - def create_user(self, *args, **kwargs): - return self.env.create_user(*args, **kwargs) - - def login(self, login, password=None): - return self.env.login(login, password) - - def restore_connection(self): - self.env.restore_connection() - - # db api ################################################################## - - @nocoverage - def cursor(self, req=None): - return self.env.cnx.cursor(req or self.request()) - - @nocoverage - def execute(self, *args, **kwargs): - return self.env.execute(*args, **kwargs) - - @nocoverage - def commit(self): - self.env.cnx.commit() - - @nocoverage - def rollback(self): - try: - self.env.cnx.rollback() - except ProgrammingError: - pass - - # other utilities ######################################################### - def set_debug(self, debugmode): - from cubicweb.server import set_debug - set_debug(debugmode) - - @property - def config(self): - return self.vreg.config - - def session(self): - """return current server side session (using default manager account)""" - return self.env.repo._sessions[self.env.cnx.sessionid] - - def request(self, *args, **kwargs): - """return a web interface request""" - return self.env.create_request(*args, **kwargs) - - @nocoverage - def rset_and_req(self, *args, **kwargs): - return self.env.get_rset_and_req(*args, **kwargs) - - def entity(self, rql, args=None, eidkey=None, req=None): - return self.execute(rql, args, eidkey, req=req).get_entity(0, 0) - - def etype_instance(self, etype, req=None): - req = req or self.request() - e = self.env.vreg['etypes'].etype_class(etype)(req) - e.eid = None - return e - - def add_entity(self, etype, **kwargs): - rql = ['INSERT %s X' % etype] - - # dict for replacement in RQL Request - rql_args = {} - - if kwargs: # - rql.append(':') - # dict to define new entities variables - entities = {} - - # assignement part of the request - sub_rql = [] - for key, value in kwargs.iteritems(): - # entities - if hasattr(value, 'eid'): - new_value = "%s__" % key.upper() - - 
entities[new_value] = value.eid - rql_args[new_value] = value.eid - - sub_rql.append("X %s %s" % (key, new_value)) - # final attributes - else: - sub_rql.append('X %s %%(%s)s' % (key, key)) - rql_args[key] = value - rql.append(', '.join(sub_rql)) - - - if entities: - rql.append('WHERE') - # WHERE part of the request (to link entity to they eid) - sub_rql = [] - for key, value in entities.iteritems(): - sub_rql.append("%s eid %%(%s)s" % (key, key)) - rql.append(', '.join(sub_rql)) - - rql = ' '.join(rql) - rset = self.execute(rql, rql_args) - return rset.get_entity(0, 0) - - def set_option(self, optname, value): - self.vreg.config.global_set_option(optname, value) - - def pviews(self, req, rset): - return sorted((a.id, a.__class__) for a in self.vreg['views'].possible_views(req, rset=rset)) - - def pactions(self, req, rset, skipcategories=('addrelated', 'siteactions', 'useractions', 'footer')): - return [(a.id, a.__class__) for a in self.vreg['actions'].possible_vobjects(req, rset=rset) - if a.category not in skipcategories] - - def action_submenu(self, req, rset, id): - return self._test_action(self.vreg['actions'].select(id, req, rset=rset)) - - def _test_action(self, action): - class fake_menu(list): - @property - def items(self): - return self - class fake_box(object): - def mk_action(self, label, url, **kwargs): - return (label, url) - def box_action(self, action, **kwargs): - return (action.title, action.url()) - submenu = fake_menu() - action.fill_menu(fake_box(), submenu) - return submenu - - def pactions_by_cats(self, req, rset, categories=('addrelated',)): - return [(a.id, a.__class__) for a in self.vreg['actions'].possible_vobjects(req, rset=rset) - if a.category in categories] - - paddrelactions = deprecated()(pactions_by_cats) - - def pactionsdict(self, req, rset, skipcategories=('addrelated', 'siteactions', 'useractions')): - res = {} - for a in self.vreg['actions'].possible_vobjects(req, rset=rset): - if a.category not in skipcategories: - res.setdefault(a.category, []).append(a.__class__) - return res - - - def remote_call(self, fname, *args): - """remote call simulation""" - dump = simplejson.dumps - args = [dump(arg) for arg in args] - req = self.request(fname=fname, pageid='123', arg=args) - ctrl = self.vreg['controllers'].select('json', req) - return ctrl.publish(), req - - # default test setup and teardown ######################################### - - def setup_database(self): - pass - - def setUp(self): - self.restore_connection() - session = self.session() - #self.maxeid = self.execute('Any MAX(X)') - session.set_pool() - self.maxeid = session.system_sql('SELECT MAX(eid) FROM entities').fetchone()[0] - self.app = self.env.app - self.vreg = self.env.app.vreg - self.schema = self.vreg.schema - self.vreg.config.mode = 'test' - # set default-dest-addrs to a dumb email address to avoid mailbox or - # mail queue pollution - self.set_option('default-dest-addrs', ['whatever']) - self.setup_database() - self.commit() - MAILBOX[:] = [] # reset mailbox - - @nocoverage - def tearDown(self): - self.rollback() - # self.env.restore_database() - self.env.restore_connection() - self.session().unsafe_execute('DELETE Any X WHERE X eid > %s' % self.maxeid) - self.commit() - - # global resources accessors ############################################### - -# XXX -try: - from cubicweb.web import Redirect - from urllib import unquote -except ImportError: - pass # cubicweb-web not installed -else: - class ControllerTC(EnvBasedTC): - def setUp(self): - super(ControllerTC, self).setUp() - 
self.req = self.request() - self.ctrl = self.vreg['controllers'].select('edit', self.req) - - def publish(self, req): - assert req is self.ctrl.req - try: - result = self.ctrl.publish() - req.cnx.commit() - except Redirect: - req.cnx.commit() - raise - return result - - def expect_redirect_publish(self, req=None): - if req is not None: - self.ctrl = self.vreg['controllers'].select('edit', req) - else: - req = self.req - try: - res = self.publish(req) - except Redirect, ex: - try: - path, params = ex.location.split('?', 1) - except: - path, params = ex.location, "" - req._url = path - cleanup = lambda p: (p[0], unquote(p[1])) - params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p) - return req.relative_path(False), params # path.rsplit('/', 1)[-1], params - else: - self.fail('expected a Redirect exception') - - -def make_late_binding_repo_property(attrname): - @property - def late_binding(self): - """builds cnx as late as possible""" - if not hasattr(self, attrname): - # sets explicit test mode here to avoid autoreload - from cubicweb.cwconfig import CubicWebConfiguration - CubicWebConfiguration.mode = 'test' - cls = self.__class__ - config = self.repo_config or TestServerConfiguration('data') - cls._repo, cls._cnx = init_test_database('sqlite', config=config) - return getattr(self, attrname) - return late_binding - - -class autorepo(type): - """automatically set repository on RepositoryBasedTC subclasses if necessary - """ - def __new__(mcs, name, bases, classdict): - repo = classdict.get('repo') - # try to find repo in one of the base classes - if repo is None: - for base in bases: - repo = getattr(base, 'repo', None) - if repo is not None: - classdict['repo'] = repo - break - if name != 'RepositoryBasedTC' and not classdict.get('repo'): - classdict['repo'] = make_late_binding_repo_property('_repo') - classdict['cnx'] = make_late_binding_repo_property('_cnx') - return super(autorepo, mcs).__new__(mcs, name, bases, classdict) - - -class RepositoryBasedTC(TestCase): - """abstract class for test using direct repository connections - """ - __metaclass__ = autorepo - repo_config = None # set a particular config instance if necessary - - # user / session management ############################################### - - def create_user(self, user, groups=('users',), password=None, commit=True): - if password is None: - password = user - eid = self.execute('INSERT CWUser X: X login %(x)s, X upassword %(p)s', - {'x': unicode(user), 'p': password})[0][0] - groups = ','.join(repr(group) for group in groups) - self.execute('SET X in_group Y WHERE X eid %%(x)s, Y name IN (%s)' % groups, - {'x': eid}) - if commit: - self.commit() - self.session.reset_pool() - return eid - - def login(self, login, password=None): - cnx = repo_connect(self.repo, unicode(login), password or login, - ConnectionProperties('inmemory')) - self.cnxs.append(cnx) - return cnx - - def current_session(self): - return self.repo._sessions[self.cnxs[-1].sessionid] - - def restore_connection(self): - assert len(self.cnxs) == 1, self.cnxs - cnx = self.cnxs.pop() - try: - cnx.close() - except Exception, ex: - print "exception occured while closing connection", ex - - # db api ################################################################## - - def execute(self, rql, args=None, eid_key=None): - assert self.session.id == self.cnxid - rset = self.__execute(self.cnxid, rql, args, eid_key) - rset.vreg = self.vreg - rset.req = self.session - # call to set_pool is necessary to avoid pb when using - # instance entities for 
convenience - self.session.set_pool() - return rset - - def commit(self): - self.__commit(self.cnxid) - self.session.set_pool() - - def rollback(self): - self.__rollback(self.cnxid) - self.session.set_pool() - - def close(self): - self.__close(self.cnxid) - - # other utilities ######################################################### - - def set_debug(self, debugmode): - from cubicweb.server import set_debug - set_debug(debugmode) - - def set_option(self, optname, value): - self.vreg.config.global_set_option(optname, value) - - def add_entity(self, etype, **kwargs): - restrictions = ', '.join('X %s %%(%s)s' % (key, key) for key in kwargs) - rql = 'INSERT %s X' % etype - if kwargs: - rql += ': %s' % ', '.join('X %s %%(%s)s' % (key, key) for key in kwargs) - rset = self.execute(rql, kwargs) - return rset.get_entity(0, 0) - - def default_user_password(self): - config = self.repo.config #TestConfiguration('data') - user = unicode(config.sources()['system']['db-user']) - passwd = config.sources()['system']['db-password'] - return user, passwd - - def close_connections(self): - for cnx in self.cnxs: - try: - cnx.rollback() - cnx.close() - except: - continue - self.cnxs = [] - - pactions = EnvBasedTC.pactions.im_func - pactionsdict = EnvBasedTC.pactionsdict.im_func - - # default test setup and teardown ######################################### - - def _prepare(self): - MAILBOX[:] = [] # reset mailbox - if hasattr(self, 'cnxid'): - return - repo = self.repo - self.__execute = repo.execute - self.__commit = repo.commit - self.__rollback = repo.rollback - self.__close = repo.close - self.cnxid = self.cnx.sessionid - self.session = repo._sessions[self.cnxid] - self.cnxs = [] - # reset caches, they may introduce bugs among tests - repo._type_source_cache = {} - repo._extid_cache = {} - repo.querier._rql_cache = {} - for source in repo.sources: - source.reset_caches() - for s in repo.sources: - if hasattr(s, '_cache'): - s._cache = {} - - @property - def config(self): - return self.repo.config - - @property - def vreg(self): - return self.repo.vreg - - @property - def schema(self): - return self.repo.schema - - def setUp(self): - self._prepare() - self.session.set_pool() - self.maxeid = self.session.system_sql('SELECT MAX(eid) FROM entities').fetchone()[0] - - def tearDown(self): - self.close_connections() - self.rollback() - self.session.unsafe_execute('DELETE Any X WHERE X eid > %(x)s', {'x': self.maxeid}) - self.commit() - diff -r 15d541321a8c -r 74c1597f8a82 devtools/dataimport.py --- a/devtools/dataimport.py Wed Jan 20 10:13:02 2010 +0100 +++ b/devtools/dataimport.py Wed Jan 20 10:13:45 2010 +0100 @@ -51,16 +51,52 @@ """ __docformat__ = "restructuredtext en" -import sys, csv, traceback +import sys +import csv +import traceback +import os.path as osp +from StringIO import StringIO from logilab.common import shellutils +from logilab.common.deprecation import deprecated -def utf8csvreader(file, encoding='utf-8', separator=',', quote='"'): - """A csv reader that accepts files with any encoding and outputs - unicode strings.""" - for row in csv.reader(file, delimiter=separator, quotechar=quote): +def ucsvreader_pb(filepath, encoding='utf-8', separator=',', quote='"', + skipfirst=False, withpb=True): + """same as ucsvreader but a progress bar is displayed as we iter on rows""" + if not osp.exists(filepath): + raise Exception("file doesn't exists: %s" % filepath) + rowcount = int(shellutils.Execute('wc -l "%s"' % filepath).out.strip().split()[0]) + if skipfirst: + rowcount -= 1 + if withpb: + pb = 
shellutils.ProgressBar(rowcount, 50) + for urow in ucsvreader(file(filepath), encoding, separator, quote, skipfirst): + yield urow + if withpb: + pb.update() + print ' %s rows imported' % rowcount + +def ucsvreader(stream, encoding='utf-8', separator=',', quote='"', + skipfirst=False): + """A csv reader that accepts files with any encoding and outputs unicode + strings + """ + it = iter(csv.reader(stream, delimiter=separator, quotechar=quote)) + if skipfirst: + it.next() + for row in it: yield [item.decode(encoding) for item in row] +utf8csvreader = deprecated('use ucsvreader instead')(ucsvreader) + +def commit_every(nbit, store, it): + for i, x in enumerate(it): + yield x + if nbit is not None and i % nbit: + store.checkpoint() + if nbit is not None: + store.checkpoint() + def lazytable(reader): """The first row is taken to be the header of the table and used to output a dict for each row of data. @@ -71,10 +107,56 @@ for row in reader: yield dict(zip(header, row)) +def mk_entity(row, map): + """Return a dict made from sanitized mapped values. + + >>> row = {'myname': u'dupont'} + >>> map = [('myname', u'name', (capitalize_if_unicase,))] + >>> mk_entity(row, map) + {'name': u'Dupont'} + """ + res = {} + for src, dest, funcs in map: + res[dest] = row[src] + for func in funcs: + res[dest] = func(res[dest]) + return res + + +# user interactions ############################################################ + def tell(msg): print msg -# base sanitizing functions ##### +def confirm(question): + """A confirm function that asks for yes/no/abort and exits on abort.""" + answer = shellutils.ASK.ask(question, ('Y','n','abort'), 'Y') + if answer == 'abort': + sys.exit(1) + return answer == 'Y' + + +class catch_error(object): + """Helper for @contextmanager decorator.""" + + def __init__(self, ctl, key='unexpected error', msg=None): + self.ctl = ctl + self.key = key + self.msg = msg + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + if type is not None: + if issubclass(type, (KeyboardInterrupt, SystemExit)): + return # re-raise + if self.ctl.catcherrors: + self.ctl.record_error(self.key, None, type, value, traceback) + return True # silent + + +# base sanitizing functions #################################################### def capitalize_if_unicase(txt): if txt.isupper() or txt.islower(): @@ -99,30 +181,19 @@ def strip(txt): return txt.strip() -# base checks ##### + +# base integrity checking functions ############################################ def check_doubles(buckets): """Extract the keys that have more than one item in their bucket.""" return [(key, len(value)) for key,value in buckets.items() if len(value) > 1] -# make entity helper ##### - -def mk_entity(row, map): - """Return a dict made from sanitized mapped values. +def check_doubles_not_none(buckets): + """Extract the keys that have more than one item in their bucket.""" + return [(key, len(value)) for key,value in buckets.items() if key is not None and len(value) > 1] - >>> row = {'myname': u'dupont'} - >>> map = [('myname', u'name', (capitalize_if_unicase,))] - >>> mk_entity(row, map) - {'name': u'Dupont'} - """ - res = {} - for src, dest, funcs in map: - res[dest] = row[src] - for func in funcs: - res[dest] = func(res[dest]) - return res -# object stores +# object stores ################################################################# class ObjectStore(object): """Store objects in memory for faster testing. 
Will not @@ -181,27 +252,52 @@ if item[key] == value: yield item - def rql(self, query, args): - if self._rql: - return self._rql(query, args) + def checkpoint(self): + pass - def checkpoint(self): - if self._checkpoint: - self._checkpoint() class RQLObjectStore(ObjectStore): """ObjectStore that works with an actual RQL repository.""" + _rql = None # bw compat + + def __init__(self, session=None, checkpoint=None): + ObjectStore.__init__(self) + if session is not None: + if not hasattr(session, 'set_pool'): + # connection + cnx = session + session = session.request() + session.set_pool = lambda : None + checkpoint = checkpoint or cnx.commit + self.session = session + self.checkpoint = checkpoint or session.commit + elif checkpoint is not None: + self.checkpoint = checkpoint + + def rql(self, *args): + if self._rql is not None: + return self._rql(*args) + self.session.set_pool() + return self.session.execute(*args) + + def create_entity(self, *args, **kwargs): + self.session.set_pool() + entity = self.session.create_entity(*args, **kwargs) + self.eids[entity.eid] = entity + self.types.setdefault(args[0], []).append(entity.eid) + return entity def _put(self, type, item): query = ('INSERT %s X: ' % type) + ', '.join(['X %s %%(%s)s' % (key,key) for key in item]) return self.rql(query, item)[0][0] def relate(self, eid_from, rtype, eid_to): - query = 'SET X %s Y WHERE X eid %%(from)s, Y eid %%(to)s' % rtype - self.rql(query, {'from': int(eid_from), 'to': int(eid_to)}) + self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype, + {'x': int(eid_from), 'y': int(eid_to)}, ('x', 'y')) self.relations.add( (eid_from, rtype, eid_to) ) -# import controller ##### + +# the import controller ######################################################## class CWImportController(object): """Controller of the data import process. 
@@ -212,12 +308,17 @@ >>> ctl.run() """ - def __init__(self, store): + def __init__(self, store, askerror=False, catcherrors=None, tell=tell, + commitevery=50): self.store = store self.generators = None self.data = {} self.errors = None - self.askerror = False + self.askerror = askerror + if catcherrors is None: + catcherrors = askerror + self.catcherrors = catcherrors + self.commitevery = commitevery # set to None to do a single commit self._tell = tell def check(self, type, key, value): @@ -230,34 +331,47 @@ self.check(key, entity[key], None) entity[key] = default + def record_error(self, key, msg=None, type=None, value=None, tb=None): + tmp = StringIO() + if type is None: + traceback.print_exc(file=tmp) + else: + traceback.print_exception(type, value, tb, file=tmp) + print tmp.getvalue() + # use a list to avoid counting a errors instead of one + errorlog = self.errors.setdefault(key, []) + if msg is None: + errorlog.append(tmp.getvalue().splitlines()) + else: + errorlog.append( (msg, tmp.getvalue().splitlines()) ) + def run(self): self.errors = {} for func, checks in self.generators: self._checks = {} - func_name = func.__name__[4:] - question = 'Importation de %s' % func_name - self.tell(question) + func_name = func.__name__[4:] # XXX + self.tell('Importing %s' % func_name) try: func(self) except: - import StringIO - tmp = StringIO.StringIO() - traceback.print_exc(file=tmp) - print tmp.getvalue() - self.errors[func_name] = ('Erreur lors de la transformation', - tmp.getvalue().splitlines()) + if self.catcherrors: + self.record_error(func_name, 'While calling %s' % func.__name__) + else: + raise for key, func, title, help in checks: buckets = self._checks.get(key) if buckets: err = func(buckets) if err: self.errors[title] = (help, err) - self.store.checkpoint() - errors = sum(len(err[1]) for err in self.errors.values()) - self.tell('Importation terminée. (%i objets, %i types, %i relations et %i erreurs).' 
+ self.store.checkpoint() + self.tell('\nImport completed: %i entities (%i types), %i relations' % (len(self.store.eids), len(self.store.types), - len(self.store.relations), errors)) - if self.errors and self.askerror and confirm('Afficher les erreurs ?'): + len(self.store.relations))) + nberrors = sum(len(err[1]) for err in self.errors.values()) + if nberrors: + print '%s errors' % nberrors + if self.errors and self.askerror and confirm('Display errors?'): import pprint pprint.pprint(self.errors) @@ -270,9 +384,6 @@ def tell(self, msg): self._tell(msg) -def confirm(question): - """A confirm function that asks for yes/no/abort and exits on abort.""" - answer = shellutils.ASK.ask(question, ('Y','n','abort'), 'Y') - if answer == 'abort': - sys.exit(1) - return answer == 'Y' + def iter_and_commit(self, datakey): + """iter rows, triggering commit every self.commitevery iterations""" + return commit_every(self.commitevery, self.store, self.get_data(datakey)) diff -r 15d541321a8c -r 74c1597f8a82 devtools/devctl.py --- a/devtools/devctl.py Wed Jan 20 10:13:02 2010 +0100 +++ b/devtools/devctl.py Wed Jan 20 10:13:45 2010 +0100 @@ -22,13 +22,13 @@ from logilab.common.clcommands import register_commands from cubicweb.__pkginfo__ import version as cubicwebversion -from cubicweb import (CW_SOFTWARE_ROOT as BASEDIR, BadCommandUsage, - underline_title) +from cubicweb import CW_SOFTWARE_ROOT as BASEDIR, BadCommandUsage +from cubicweb.toolsutils import Command, copy_skeleton, underline_title from cubicweb.schema import CONSTRAINTS -from cubicweb.toolsutils import Command, copy_skeleton from cubicweb.web.webconfig import WebConfiguration from cubicweb.server.serverconfig import ServerConfiguration + class DevCubeConfiguration(ServerConfiguration, WebConfiguration): """dummy config to get full library schema and entities""" creating = True @@ -113,7 +113,7 @@ def _generate_schema_pot(w, vreg, schema, libconfig=None, cube=None): - from cubicweb.common.i18n import add_msg + from cubicweb.i18n import add_msg from cubicweb.web import uicfg from cubicweb.schema import META_RTYPES, SYSTEM_RTYPES no_context_rtypes = META_RTYPES | SYSTEM_RTYPES @@ -125,19 +125,19 @@ if libconfig is not None: from cubicweb.cwvreg import CubicWebVRegistry, clear_rtag_objects libschema = libconfig.load_schema(remove_unused_rtypes=False) - rinlined = deepcopy(uicfg.autoform_is_inlined) + afs = deepcopy(uicfg.autoform_section) appearsin_addmenu = deepcopy(uicfg.actionbox_appearsin_addmenu) clear_rtag_objects() cleanup_sys_modules(libconfig) libvreg = CubicWebVRegistry(libconfig) libvreg.set_schema(libschema) # trigger objects registration - librinlined = uicfg.autoform_is_inlined + libafs = uicfg.autoform_section libappearsin_addmenu = uicfg.actionbox_appearsin_addmenu # prefill vregdone set list(_iter_vreg_objids(libvreg, vregdone)) else: libschema = {} - rinlined = uicfg.autoform_is_inlined + afs = uicfg.autoform_section appearsin_addmenu = uicfg.actionbox_appearsin_addmenu for cstrtype in CONSTRAINTS: add_msg(w, cstrtype) @@ -157,9 +157,11 @@ continue for rschema, targetschemas, role in eschema.relation_definitions(True): for tschema in targetschemas: - if rinlined.etype_get(eschema, rschema, role, tschema) and \ + fsections = afs.etype_get(eschema, rschema, role, tschema) + if 'inlined_attributes' in fsections and \ (libconfig is None or not - librinlined.etype_get(eschema, rschema, role, tschema)): + 'inlined_attributes' in libafs.etype_get( + eschema, rschema, role, tschema)): add_msg(w, 'add a %s' % tschema, 'inlined:%s.%s.%s' % 
(etype, rschema, role)) add_msg(w, 'remove this %s' % tschema, @@ -217,14 +219,19 @@ add_msg(w, '%s_description' % objid) add_msg(w, objid) + def _iter_vreg_objids(vreg, done, prefix=None): for reg, objdict in vreg.items(): for objects in objdict.values(): for obj in objects: - objid = '%s_%s' % (reg, obj.id) + objid = '%s_%s' % (reg, obj.__regid__) if objid in done: break - if obj.property_defs: + try: # XXX < 3.6 bw compat + pdefs = obj.property_defs + except AttributeError: + pdefs = getattr(obj, 'cw_property_defs', {}) + if pdefs: yield objid done.add(objid) break @@ -279,7 +286,7 @@ import yams from logilab.common.fileutils import ensure_fs_mode from logilab.common.shellutils import globfind, find, rm - from cubicweb.common.i18n import extract_from_tal, execute + from cubicweb.i18n import extract_from_tal, execute tempdir = tempfile.mkdtemp() potfiles = [join(I18NDIR, 'static-messages.pot')] print '-> extract schema messages.' @@ -372,7 +379,7 @@ import tempfile from logilab.common.fileutils import ensure_fs_mode from logilab.common.shellutils import find, rm - from cubicweb.common.i18n import extract_from_tal, execute + from cubicweb.i18n import extract_from_tal, execute toedit = [] cube = basename(normpath(cubedir)) tempdir = tempfile.mkdtemp() diff -r 15d541321a8c -r 74c1597f8a82 devtools/fake.py --- a/devtools/fake.py Wed Jan 20 10:13:02 2010 +0100 +++ b/devtools/fake.py Wed Jan 20 10:13:45 2010 +0100 @@ -7,12 +7,11 @@ """ __docformat__ = "restructuredtext en" -from logilab.common.testlib import mock_object as Mock from logilab.common.adbh import get_adv_func_helper from indexer import get_indexer -from cubicweb import RequestSessionMixIn +from cubicweb.req import RequestSessionBase from cubicweb.cwvreg import CubicWebVRegistry from cubicweb.web.request import CubicWebRequestBase from cubicweb.devtools import BASE_URL, BaseApptestConfiguration @@ -81,15 +80,15 @@ def set_header(self, header, value): """set an output HTTP header""" - pass + self._headers[header] = value def add_header(self, header, value): """set an output HTTP header""" - pass + self._headers[header] = value # XXX def remove_header(self, header): """remove an output HTTP header""" - pass + self._headers.pop(header, None) def get_header(self, header, default=None): """return the value associated with the given input header, @@ -97,16 +96,24 @@ """ return self._headers.get(header, default) - def set_cookie(self, cookie, key, maxage=300): + def set_cookie(self, cookie, key, maxage=300, expires=None): """set / update a cookie key by default, cookie will be available for the next 5 minutes """ - pass + morsel = cookie[key] + if maxage is not None: + morsel['Max-Age'] = maxage + if expires: + morsel['expires'] = expires.strftime('%a, %d %b %Y %H:%M:%S %z') + # make sure cookie is set on the correct path + morsel['path'] = self.base_url_path() + self.add_header('Set-Cookie', morsel.OutputString()) + self.add_header('Cookie', morsel.OutputString()) def remove_cookie(self, cookie, key): - """remove a cookie by expiring it""" - pass + self.remove_header('Set-Cookie') + self.remove_header('Cookie') def validate_cache(self): pass @@ -130,7 +137,7 @@ return True -class FakeSession(RequestSessionMixIn): +class FakeSession(RequestSessionBase): def __init__(self, repo=None, user=None): self.repo = repo self.vreg = getattr(self.repo, 'vreg', CubicWebVRegistry(FakeConfig(), initlog=False)) diff -r 15d541321a8c -r 74c1597f8a82 devtools/fill.py --- a/devtools/fill.py Wed Jan 20 10:13:02 2010 +0100 +++ b/devtools/fill.py Wed Jan 20 
10:13:45 2010 +0100 @@ -30,7 +30,7 @@ """returns possible choices for 'attrname' if attrname doesn't have ChoiceConstraint, return None """ - for cst in eschema.constraints(attrname): + for cst in eschema.rdef(attrname).constraints: if isinstance(cst, StaticVocabularyConstraint): return cst.vocabulary() return None @@ -38,14 +38,14 @@ def get_max_length(eschema, attrname): """returns the maximum length allowed for 'attrname'""" - for cst in eschema.constraints(attrname): + for cst in eschema.rdef(attrname).constraints: if isinstance(cst, SizeConstraint) and cst.max: return cst.max return 300 #raise AttributeError('No Size constraint on attribute "%s"' % attrname) def get_bounds(eschema, attrname): - for cst in eschema.constraints(attrname): + for cst in eschema.rdef(attrname).constraints: if isinstance(cst, IntervalBoundConstraint): return cst.minvalue, cst.maxvalue return None, None @@ -112,7 +112,7 @@ attrlength = get_max_length(self.e_schema, attrname) num_len = numlen(index) if num_len >= attrlength: - ascii = self.e_schema.rproperty(attrname, 'internationalizable') + ascii = self.e_schema.rdef(attrname).internationalizable return ('&'+decompose_b26(index, ascii))[:attrlength] # always use plain text when no format is specified attrprefix = attrname[:max(attrlength-num_len-1, 0)] @@ -340,10 +340,10 @@ def composite_relation(rschema): for obj in rschema.objects(): - if obj.objrproperty(rschema, 'composite') == 'subject': + if obj.rdef(rschema, 'object').composite == 'subject': return True for obj in rschema.subjects(): - if obj.subjrproperty(rschema, 'composite') == 'object': + if obj.rdef(rschema, 'subject').composite == 'object': return True return False @@ -372,11 +372,11 @@ oedict = deepcopy(edict) delayed = [] # for each couple (subjschema, objschema), insert relations - for subj, obj in rschema.iter_rdefs(): + for subj, obj in rschema.rdefs: sym.add( (subj, obj) ) if rschema.symetric and (obj, subj) in sym: continue - subjcard, objcard = rschema.rproperty(subj, obj, 'cardinality') + subjcard, objcard = rschema.rdef(subj, obj).cardinality # process mandatory relations first if subjcard in '1+' or objcard in '1+' or composite_relation(rschema): for query, args in self.make_relation_queries(sedict, oedict, @@ -397,14 +397,15 @@ return {'subjeid' : subjeid, 'objeid' : objeid} def make_relation_queries(self, sedict, oedict, rschema, subj, obj): - subjcard, objcard = rschema.rproperty(subj, obj, 'cardinality') + rdef = rschema.rdef(subj, obj) + subjcard, objcard = rdef.cardinality subjeids = sedict.get(subj, frozenset()) used = self.existingrels[rschema.type] preexisting_subjrels = set(subj for subj, obj in used) preexisting_objrels = set(obj for subj, obj in used) # if there are constraints, only select appropriate objeids q = self.rql_tmpl % rschema.type - constraints = [c for c in rschema.rproperty(subj, obj, 'constraints') + constraints = [c for c in rdef.constraints if isinstance(c, RQLConstraint)] if constraints: restrictions = ', '.join(c.restriction for c in constraints) diff -r 15d541321a8c -r 74c1597f8a82 devtools/htmlparser.py --- a/devtools/htmlparser.py Wed Jan 20 10:13:02 2010 +0100 +++ b/devtools/htmlparser.py Wed Jan 20 10:13:45 2010 +0100 @@ -174,3 +174,5 @@ except KeyError: continue return False + +VALMAP = {None: None, 'dtd': DTDValidator, 'xml': SaxOnlyValidator} diff -r 15d541321a8c -r 74c1597f8a82 devtools/livetest.py --- a/devtools/livetest.py Wed Jan 20 10:13:02 2010 +0100 +++ b/devtools/livetest.py Wed Jan 20 10:13:45 2010 +0100 @@ -6,9 +6,10 @@ :license: GNU 
Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ +import os import socket import logging -from os.path import join, dirname, exists +from os.path import join, dirname, normpath, abspath from StringIO import StringIO #from twisted.application import service, strports @@ -21,10 +22,9 @@ from logilab.common.testlib import TestCase -import cubicweb.web from cubicweb.dbapi import in_memory_cnx from cubicweb.etwist.server import CubicWebRootResource -from cubicweb.devtools import LivetestConfiguration, init_test_database +from cubicweb.devtools import BaseApptestConfiguration, init_test_database @@ -50,25 +50,57 @@ +class LivetestConfiguration(BaseApptestConfiguration): + init_repository = False + + def __init__(self, cube=None, sourcefile=None, pyro_name=None, + log_threshold=logging.CRITICAL): + BaseApptestConfiguration.__init__(self, cube, log_threshold=log_threshold) + self.appid = pyro_name or cube + # don't change this, else some symlink problems may arise in some + # environment (e.g. mine (syt) ;o) + # XXX I'm afraid this test will prevent to run test from a production + # environment + self._sources = None + # instance cube test + if cube is not None: + self.apphome = self.cube_dir(cube) + elif 'web' in os.getcwd().split(os.sep): + # web test + self.apphome = join(normpath(join(dirname(__file__), '..')), 'web') + else: + # cube test + self.apphome = abspath('..') + self.sourcefile = sourcefile + self.global_set_option('realm', '') + self.use_pyro = pyro_name is not None + + def pyro_enabled(self): + if self.use_pyro: + return True + else: + return False + + + def make_site(cube, options=None): from cubicweb.etwist import twconfig # trigger configuration registration - sourcefile = options.sourcefile - config = LivetestConfiguration(cube, sourcefile, + config = LivetestConfiguration(cube, options.sourcefile, pyro_name=options.pyro_name, log_threshold=logging.DEBUG) - source = config.sources()['system'] - init_test_database(driver=source['db-driver'], config=config) + init_test_database(config=config) # if '-n' in sys.argv: # debug mode cubicweb = LivetestResource(config, debug=True) toplevel = cubicweb website = server.Site(toplevel) cube_dir = config.cube_dir(cube) + source = config.sources()['system'] for port in xrange(7777, 7798): try: reactor.listenTCP(port, channel.HTTPFactory(website)) saveconf(cube_dir, port, source['db-user'], source['db-password']) break - except CannotListenError, exc: + except CannotListenError: print "port %s already in use, I will try another one" % port else: raise @@ -119,7 +151,7 @@ # build a config, and get a connection self.config = LivetestConfiguration(self.cube, self.sourcefile) _, user, passwd, _ = loadconf() - self.repo, self.cnx = in_memory_cnx(self.config, user, passwd) + self.repo, self.cnx = in_memory_cnx(self.config, user, password=passwd) self.setup_db(self.cnx) def tearDown(self): diff -r 15d541321a8c -r 74c1597f8a82 devtools/migrtest.py --- a/devtools/migrtest.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,153 +0,0 @@ -"""Migration test script - -* migration will be played into a chroot of the local machine -* the database server used can be configured -* test tested instance may be on another host - - -We are using postgres'.pgpass file. 
Here is a copy of postgres documentation -about that: - -The file .pgpass in a user's home directory or the file referenced by -PGPASSFILE can contain passwords to be used if the connection requires -a password (and no password has been specified otherwise). - - -This file should contain lines of the following format: - -hostname:port:database:username:password - -Each of the first four fields may be a literal value, or *, which -matches anything. The password field from the first line that matches -the current connection parameters will be used. (Therefore, put -more-specific entries first when you are using wildcards.) If an entry -needs to contain : or \, escape this character with \. A hostname of -localhost matches both host (TCP) and local (Unix domain socket) -connections coming from the local machine. - -The permissions on .pgpass must disallow any access to world or group; -achieve this by the command chmod 0600 ~/.pgpass. If the permissions -are less strict than this, the file will be ignored. - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -from os import system -from os.path import join, basename - -from logilab.common.shellutils import cp, rm - -from cubicweb.toolsutils import read_config -from cubicweb.server.utils import generate_sources_file - -# XXXX use db-copy instead - -# test environment configuration -chrootpath = '/sandbox/cubicwebtest' -tmpdbhost = 'crater' -tmpdbuser = 'syt' -tmpdbpasswd = 'syt' - -def play_migration(applhome, applhost='', sudo=False): - applid = dbname = basename(applhome) - testapplhome = join(chrootpath, applhome) - # copy instance into the chroot - if applhost: - system('scp -r %s:%s %s' % (applhost, applhome, testapplhome)) - else: - cp(applhome, testapplhome) -## # extract db parameters -## sources = read_config(join(testapplhome, 'sources')) -## dbname = sources['system']['db-name'] -## dbhost = sources['system'].get('db-host') or '' -## dbuser = sources['system'].get('db-user') or '' -## dbpasswd = sources['system'].get('db-password') or '' - # generate sources file - # XXX multisources - sources = {'system': {}} - sources['system']['db-encoding'] = 'UTF8' # XXX - sources['system']['db-name'] = dbname - sources['system']['db-host'] = None - sources['system']['db-user'] = tmpdbuser - sources['system']['db-password'] = None - generate_sources_file(applid, join(testapplhome, 'sources'), sources) -## # create postgres password file so we won't need anymore passwords -## # XXX may exist! 
-## pgpassfile = expanduser('~/.pgpass') -## pgpass = open(pgpassfile, 'w') -## if dbpasswd: -## pgpass.write('%s:*:%s:%s:%s\n' % (dbhost or applhost or 'localhost', -## dbname, dbuser, dbpasswd)) -## if tmpdbpasswd: -## pgpass.write('%s:*:%s:%s:%s\n' % (tmpdbhost or 'localhost', dbname, -## tmpdbuser, tmpdbpasswd)) -## pgpass.close() -## chmod(pgpassfile, 0600) - # dump db -## dumpcmd = 'pg_dump -Fc -U %s -f /tmp/%s.dump %s' % ( -## dbuser, dbname, dbname) -## if dbhost: -## dumpcmd += ' -h %s' % dbhost - dumpfile = '/tmp/%s.dump' % applid - dumpcmd = 'cubicweb-ctl db-dump --output=%s %s' % (dumpfile, applid) - if sudo: - dumpcmd = 'sudo %s' % dumpcmd - if applhost: - dumpcmd = 'ssh %s "%s"' % (applhost, dumpcmd) - if system(dumpcmd): - raise Exception('error while dumping the database') -## if not dbhost and applhost: - if applhost: - # retrieve the dump - if system('scp %s:%s %s' % (applhost, dumpfile, dumpfile)): - raise Exception('error while retreiving the dump') - # move the dump into the chroot - system('mv %s %s%s' % (dumpfile, chrootpath, dumpfile)) - # locate installed versions - vcconf = read_config(join(testapplhome, 'vc.conf')) - template = vcconf['TEMPLATE'] - cubicwebversion = vcconf['CW'] - templversion = vcconf['TEMPLATE_VERSION'] - # install the same versions cubicweb and template versions into the chroot - system('sudo chroot %s apt-get update' % chrootpath) - system('sudo chroot %s apt-get install cubicweb-server=%s cubicweb-client=%s' - % (chrootpath, cubicwebversion, cubicwebversion)) - system('sudo chroot %s apt-get install cubicweb-%s-appl-server=%s cubicweb-%s-appl-client=%s' - % (chrootpath, template, templversion, template, templversion)) - # update and upgrade to the latest version - system('sudo chroot %s apt-get install cubicweb-server cubicweb-client' % chrootpath) - system('sudo chroot %s apt-get install cubicweb-%s-appl-server cubicweb-%s-appl-client' - % (chrootpath, template, template)) - # create and fill the database - system('sudo chroot cubicweb-ctl db-restore %s %s' % (applid, dumpfile)) -## if not tmpdbhost: -## system('createdb -U %s -T template0 -E UTF8 %s' % (tmpdbuser, dbname)) -## system('pg_restore -U %s -O -Fc -d %s /tmp/%s.dump' -## % (tmpdbuser, dbname, dbname)) -## else: -## system('createdb -h %s -U %s -T template0 -E UTF8 %s' -## % (tmpdbhost, tmpdbuser, dbname)) -## system('pg_restore -h %s -U %s -O -Fc -d %s /tmp/%s.dump' -## % (tmpdbhost, tmpdbuser, dbname, dbname)) - # launch upgrade - system('sudo chroot %s cubicweb-ctl upgrade %s' % (chrootpath, applid)) - - # cleanup - rm(testapplhome) -## rm(pgpassfile) -## if tmpdbhost: -## system('dropdb -h %s -U %s %s' % (tmpdbuser, tmpdbhost, dbname)) -## else: -## system('dropdb -U %s %s' % (tmpdbuser, dbname)) -## if not dbhost and applhost: - if applhost: - system('ssh %s rm %s' % (applhost, dumpfile)) - rm('%s%s' % (chrootpath, dumpfile)) - - -if __name__ == '__main__': - play_migration('/etc/cubicweb.d/jpl', 'lepus') diff -r 15d541321a8c -r 74c1597f8a82 devtools/realdbtest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/realdbtest.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,43 @@ +import logging +from cubicweb import toolsutils +from cubicweb.devtools import DEFAULT_SOURCES, BaseApptestConfiguration + +class RealDatabaseConfiguration(BaseApptestConfiguration): + init_repository = False + sourcesdef = DEFAULT_SOURCES.copy() + + def sources(self): + """ + By default, we run tests with the sqlite DB backend. 
+ One may use its own configuration by just creating a + 'sources' file in the test directory from wich tests are + launched. + """ + self._sources = self.sourcesdef + return self._sources + + +def buildconfig(dbuser, dbpassword, dbname, adminuser, adminpassword, dbhost=None): + """convenience function that builds a real-db configuration class""" + sourcesdef = {'system': {'adapter' : 'native', + 'db-encoding' : 'UTF-8', #'ISO-8859-1', + 'db-user' : dbuser, + 'db-password' : dbpassword, + 'db-name' : dbname, + 'db-driver' : 'postgres', + 'db-host' : dbhost, + }, + 'admin' : {'login': adminuser, + 'password': adminpassword, + }, + } + return type('MyRealDBConfig', (RealDatabaseConfiguration,), + {'sourcesdef': sourcesdef}) + + +def loadconfig(filename): + """convenience function that builds a real-db configuration class + from a file + """ + return type('MyRealDBConfig', (RealDatabaseConfiguration,), + {'sourcesdef': toolsutils.read_config(filename)}) diff -r 15d541321a8c -r 74c1597f8a82 devtools/repotest.py --- a/devtools/repotest.py Wed Jan 20 10:13:02 2010 +0100 +++ b/devtools/repotest.py Wed Jan 20 10:13:45 2010 +0100 @@ -174,17 +174,18 @@ rqlhelper._analyser.uid_func_mapping = {} return rqlhelper - def _prepare_plan(self, rql, kwargs=None): + def _prepare_plan(self, rql, kwargs=None, simplify=True): rqlhelper = self._rqlhelper() rqlst = rqlhelper.parse(rql) rqlhelper.compute_solutions(rqlst, kwargs=kwargs) - rqlhelper.simplify(rqlst) + if simplify: + rqlhelper.simplify(rqlst) for select in rqlst.children: select.solutions.sort() return self.o.plan_factory(rqlst, kwargs, self.session) def _prepare(self, rql, kwargs=None): - plan = self._prepare_plan(rql, kwargs) + plan = self._prepare_plan(rql, kwargs, simplify=False) plan.preprocess(plan.rqlst) rqlst = plan.rqlst.children[0] rqlst.solutions = remove_unused_solutions(rqlst, rqlst.solutions, {}, self.repo.schema)[0] diff -r 15d541321a8c -r 74c1597f8a82 devtools/stresstester.py --- a/devtools/stresstester.py Wed Jan 20 10:13:02 2010 +0100 +++ b/devtools/stresstester.py Wed Jan 20 10:13:45 2010 +0100 @@ -156,7 +156,7 @@ # get local access to the repository print "Creating repo", prof_file repo = Repository(config, prof_file) - cnxid = repo.connect(user, password) + cnxid = repo.connect(user, password=password) # connection to the CubicWeb repository repo_cnx = Connection(repo, cnxid) repo_cursor = repo_cnx.cursor() diff -r 15d541321a8c -r 74c1597f8a82 devtools/test/unittest_testlib.py --- a/devtools/test/unittest_testlib.py Wed Jan 20 10:13:02 2010 +0100 +++ b/devtools/test/unittest_testlib.py Wed Jan 20 10:13:45 2010 +0100 @@ -10,11 +10,11 @@ from unittest import TestSuite -from logilab.common.testlib import (TestCase, unittest_main, mock_object, +from logilab.common.testlib import (TestCase, unittest_main, SkipAwareTextTestRunner) + from cubicweb.devtools import htmlparser - -from cubicweb.devtools.testlib import WebTest, EnvBasedTC +from cubicweb.devtools.testlib import CubicWebTC class WebTestTC(TestCase): @@ -23,7 +23,7 @@ self.runner = SkipAwareTextTestRunner(stream=output) def test_error_raised(self): - class MyWebTest(WebTest): + class MyWebTest(CubicWebTC): def test_error_view(self): self.add_entity('Bug', title=u"bt") @@ -39,7 +39,7 @@ self.assertEquals(len(result.failures), 1) -class TestLibTC(EnvBasedTC): +class TestLibTC(CubicWebTC): def test_add_entity_with_relation(self): bug = self.add_entity(u'Bug', title=u"toto") self.add_entity(u'Bug', title=u"tata", identical_to=bug) diff -r 15d541321a8c -r 74c1597f8a82 
devtools/testlib.py --- a/devtools/testlib.py Wed Jan 20 10:13:02 2010 +0100 +++ b/devtools/testlib.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,4 +1,4 @@ -"""this module contains base classes for web tests +"""this module contains base classes and utilities for cubicweb tests :organization: Logilab :copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. @@ -7,85 +7,538 @@ """ __docformat__ = "restructuredtext en" +import os import sys +import re +from urllib import unquote from math import log -from logilab.common.debugger import Debugger -from logilab.common.testlib import InnerTest -from logilab.common.pytest import nocoverage +import simplejson + +import yams.schema -from cubicweb import ValidationError -from cubicweb.devtools import VIEW_VALIDATORS -from cubicweb.devtools.apptest import EnvBasedTC -from cubicweb.devtools._apptest import unprotected_entities, SYSTEM_RELATIONS -from cubicweb.devtools.htmlparser import DTDValidator, SaxOnlyValidator, HTMLValidator -from cubicweb.devtools.fill import insert_entity_queries, make_relations_queries +from logilab.common.testlib import TestCase, InnerTest +from logilab.common.pytest import nocoverage, pause_tracing, resume_tracing +from logilab.common.debugger import Debugger +from logilab.common.umessage import message_from_string +from logilab.common.decorators import cached, classproperty, clear_cache +from logilab.common.deprecation import deprecated -from cubicweb.sobjects.notification import NotificationView - -from cubicweb.vregistry import NoSelectableObject +from cubicweb import ValidationError, NoSelectableObject, AuthenticationError +from cubicweb import cwconfig, devtools, web, server +from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError +from cubicweb.sobjects import notification +from cubicweb.web import Redirect, application +from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS +from cubicweb.devtools import fake, htmlparser -## TODO ############### -# creation tests: make sure an entity was actually created -# Existing Test Environment +# low-level utilities ########################################################## class CubicWebDebugger(Debugger): - + """special debugger class providing a 'view' function which saves some + html into a temporary file and open a web browser to examinate it. + """ def do_view(self, arg): import webbrowser data = self._getval(arg) file('/tmp/toto.html', 'w').write(data) webbrowser.open('file:///tmp/toto.html') -def how_many_dict(schema, cursor, how_many, skip): - """compute how many entities by type we need to be able to satisfy relations - cardinality - """ - # compute how many entities by type we need to be able to satisfy relation constraint - relmap = {} - for rschema in schema.relations(): - if rschema.final: - continue - for subj, obj in rschema.iter_rdefs(): - card = rschema.rproperty(subj, obj, 'cardinality') - if card[0] in '1?' and len(rschema.subjects(obj)) == 1: - relmap.setdefault((rschema, subj), []).append(str(obj)) - if card[1] in '1?' and len(rschema.objects(subj)) == 1: - relmap.setdefault((rschema, obj), []).append(str(subj)) - unprotected = unprotected_entities(schema) - for etype in skip: - unprotected.add(etype) - howmanydict = {} - for etype in unprotected_entities(schema, strict=True): - howmanydict[str(etype)] = cursor.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0] - if etype in unprotected: - howmanydict[str(etype)] += how_many - for (rschema, etype), targets in relmap.iteritems(): - # XXX should 1. 
check no cycle 2. propagate changes - relfactor = sum(howmanydict[e] for e in targets) - howmanydict[str(etype)] = max(relfactor, howmanydict[etype]) - return howmanydict - - def line_context_filter(line_no, center, before=3, after=None): """return true if line are in context - if after is None: after = before""" + + if after is None: after = before + """ if after is None: after = before return center - before <= line_no <= center + after -## base webtest class ######################################################### -VALMAP = {None: None, 'dtd': DTDValidator, 'xml': SaxOnlyValidator} + +def unprotected_entities(schema, strict=False): + """returned a set of each non final entity type, excluding "system" entities + (eg CWGroup, CWUser...) + """ + if strict: + protected_entities = yams.schema.BASE_TYPES + else: + protected_entities = yams.schema.BASE_TYPES.union(SYSTEM_ENTITIES) + return set(schema.entities()) - protected_entities + + +def get_versions(self, checkversions=False): + """return the a dictionary containing cubes used by this instance + as key with their version as value, including cubicweb version. This is a + public method, not requiring a session id. + + replace Repository.get_versions by this method if you don't want versions + checking + """ + vcconf = {'cubicweb': self.config.cubicweb_version()} + self.config.bootstrap_cubes() + for pk in self.config.cubes(): + version = self.config.cube_version(pk) + vcconf[pk] = version + self.config._cubes = None + return vcconf + + +def refresh_repo(repo): + devtools.reset_test_database(repo.config) + for pool in repo.pools: + pool.reconnect() + repo._type_source_cache = {} + repo._extid_cache = {} + repo.querier._rql_cache = {} + for source in repo.sources: + source.reset_caches() + + +# email handling, to test emails sent by an application ######################## + +MAILBOX = [] + +class Email: + """you'll get instances of Email into MAILBOX during tests that trigger + some notification. + + * `msg` is the original message object + + * `recipients` is a list of email address which are the recipients of this + message + """ + def __init__(self, recipients, msg): + self.recipients = recipients + self.msg = msg + + @property + def message(self): + return message_from_string(self.msg) + + @property + def subject(self): + return self.message.get('Subject') + + @property + def content(self): + return self.message.get_payload(decode=True) + + def __repr__(self): + return '' % (','.join(self.recipients), + self.message.get('Subject')) + +# the trick to get email into MAILBOX instead of actually sent: monkey patch +# cwconfig.SMTP object +class MockSMTP: + def __init__(self, server, port): + pass + def close(self): + pass + def sendmail(self, helo_addr, recipients, msg): + MAILBOX.append(Email(recipients, msg)) + +cwconfig.SMTP = MockSMTP + + +# base class for cubicweb tests requiring a full cw environments ############### + +class CubicWebTC(TestCase): + """abstract class for test using an apptest environment + + attributes: + `vreg`, the vregistry + `schema`, self.vreg.schema + `config`, cubicweb configuration + `cnx`, dbapi connection to the repository using an admin user + `session`, server side session associated to `cnx` + `app`, the cubicweb publisher (for web testing) + `repo`, the repository object + + `admlogin`, login of the admin user + `admpassword`, password of the admin user + + """ + appid = 'data' + configcls = devtools.ApptestConfiguration + + @classproperty + def config(cls): + """return the configuration object. 
Configuration is cached on the test + class. + """ + try: + return cls.__dict__['_config'] + except KeyError: + config = cls._config = cls.configcls(cls.appid) + config.mode = 'test' + return config + + @classmethod + def init_config(cls, config): + """configuration initialization hooks. You may want to override this.""" + source = config.sources()['system'] + cls.admlogin = unicode(source['db-user']) + cls.admpassword = source['db-password'] + # uncomment the line below if you want rql queries to be logged + #config.global_set_option('query-log-file', + # '/tmp/test_rql_log.' + `os.getpid()`) + config.global_set_option('log-file', None) + # set default-dest-addrs to a dumb email address to avoid mailbox or + # mail queue pollution + config.global_set_option('default-dest-addrs', ['whatever']) + try: + send_to = '%s@logilab.fr' % os.getlogin() + # AttributeError since getlogin not available under all platforms + except (OSError, AttributeError): + send_to = '%s@logilab.fr' % (os.environ.get('USER') + or os.environ.get('USERNAME') + or os.environ.get('LOGNAME')) + config.global_set_option('sender-addr', send_to) + config.global_set_option('default-dest-addrs', send_to) + config.global_set_option('sender-name', 'cubicweb-test') + config.global_set_option('sender-addr', 'cubicweb-test@logilab.fr') + # web resources + config.global_set_option('base-url', devtools.BASE_URL) + try: + config.global_set_option('embed-allowed', re.compile('.*')) + except: # not in server only configuration + pass + + @classmethod + def _init_repo(cls): + """init the repository and connection to it. + + Repository and connection are cached on the test class. Once + initialized, we simply reset connections and repository caches. + """ + if not 'repo' in cls.__dict__: + cls._build_repo() + else: + cls.cnx.rollback() + cls._refresh_repo() + + @classmethod + def _build_repo(cls): + cls.repo, cls.cnx = devtools.init_test_database(config=cls.config) + cls.init_config(cls.config) + cls.vreg = cls.repo.vreg + cls._orig_cnx = cls.cnx + cls.config.repository = lambda x=None: cls.repo + # necessary for authentication tests + cls.cnx.login = cls.admlogin + cls.cnx.authinfo = {'password': cls.admpassword} + + @classmethod + def _refresh_repo(cls): + refresh_repo(cls.repo) + + # global resources accessors ############################################### + + @property + def schema(self): + """return the application schema""" + return self.vreg.schema + + @property + def session(self): + """return current server side session (using default manager account)""" + return self.repo._sessions[self.cnx.sessionid] + + @property + def adminsession(self): + """return current server side session (using default manager account)""" + return self.repo._sessions[self._orig_cnx.sessionid] + + def set_option(self, optname, value): + self.config.global_set_option(optname, value) + + def set_debug(self, debugmode): + server.set_debug(debugmode) + + # default test setup and teardown ######################################### + + def setUp(self): + pause_tracing() + self._init_repo() + resume_tracing() + self.setup_database() + self.commit() + MAILBOX[:] = [] # reset mailbox + + def setup_database(self): + """add your database setup code by overriding this method""" + + # user / session management ############################################### + + def user(self, req=None): + """return the application schema""" + if req is None: + req = self.request() + return self.cnx.user(req) + else: + return req.user + + def create_user(self, login, 
groups=('users',), password=None, req=None, + commit=True): + """create and return a new user entity""" + if password is None: + password = login.encode('utf8') + cursor = self._orig_cnx.cursor(req or self.request()) + rset = cursor.execute('INSERT CWUser X: X login %(login)s, X upassword %(passwd)s', + {'login': unicode(login), 'passwd': password}) + user = rset.get_entity(0, 0) + cursor.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' + % ','.join(repr(g) for g in groups), + {'x': user.eid}, 'x') + user.clear_related_cache('in_group', 'subject') + if commit: + self._orig_cnx.commit() + return user -class WebTest(EnvBasedTC): - """base class for web tests""" - __abstract__ = True + def login(self, login, **kwargs): + """return a connection for the given login/password""" + if login == self.admlogin: + self.restore_connection() + else: + if not kwargs: + kwargs['password'] = str(login) + self.cnx = repo_connect(self.repo, unicode(login), + cnxprops=ConnectionProperties('inmemory'), + **kwargs) + if login == self.vreg.config.anonymous_user()[0]: + self.cnx.anonymous_connection = True + return self.cnx + + def restore_connection(self): + if not self.cnx is self._orig_cnx: + try: + self.cnx.close() + except ProgrammingError: + pass # already closed + self.cnx = self._orig_cnx + + # db api ################################################################## + + @nocoverage + def cursor(self, req=None): + return self.cnx.cursor(req or self.request()) + + @nocoverage + def execute(self, rql, args=None, eidkey=None, req=None): + """executes , builds a resultset, and returns a couple (rset, req) + where req is a FakeRequest + """ + req = req or self.request(rql=rql) + return self.cnx.cursor(req).execute(unicode(rql), args, eidkey) + + @nocoverage + def commit(self): + self.cnx.commit() + + @nocoverage + def rollback(self): + try: + self.cnx.rollback() + except ProgrammingError: + pass + + # # server side db api ####################################################### + + def sexecute(self, rql, args=None, eid_key=None): + self.session.set_pool() + return self.session.execute(rql, args, eid_key) + + # other utilities ######################################################### + + def entity(self, rql, args=None, eidkey=None, req=None): + return self.execute(rql, args, eidkey, req=req).get_entity(0, 0) + + # vregistry inspection utilities ########################################### + + def pviews(self, req, rset): + return sorted((a.__regid__, a.__class__) + for a in self.vreg['views'].possible_views(req, rset=rset)) + + def pactions(self, req, rset, + skipcategories=('addrelated', 'siteactions', 'useractions', 'footer')): + return [(a.__regid__, a.__class__) + for a in self.vreg['actions'].poss_visible_objects(req, rset=rset) + if a.category not in skipcategories] + + def pactions_by_cats(self, req, rset, categories=('addrelated',)): + return [(a.__regid__, a.__class__) + for a in self.vreg['actions'].poss_visible_objects(req, rset=rset) + if a.category in categories] + + def pactionsdict(self, req, rset, + skipcategories=('addrelated', 'siteactions', 'useractions', 'footer')): + res = {} + for a in self.vreg['actions'].poss_visible_objects(req, rset=rset): + if a.category not in skipcategories: + res.setdefault(a.category, []).append(a.__class__) + return res + + def action_submenu(self, req, rset, id): + return self._test_action(self.vreg['actions'].select(id, req, rset=rset)) + + def _test_action(self, action): + class fake_menu(list): + @property + def items(self): + return self + class 
fake_box(object): + def mk_action(self, label, url, **kwargs): + return (label, url) + def box_action(self, action, **kwargs): + return (action.title, action.url()) + submenu = fake_menu() + action.fill_menu(fake_box(), submenu) + return submenu - pdbclass = CubicWebDebugger - # this is a hook to be able to define a list of rql queries - # that are application dependent and cannot be guessed automatically - application_rql = [] + def list_views_for(self, rset): + """returns the list of views that can be applied on `rset`""" + req = rset.req + only_once_vids = ('primary', 'secondary', 'text') + req.data['ex'] = ValueError("whatever") + viewsvreg = self.vreg['views'] + for vid, views in viewsvreg.items(): + if vid[0] == '_': + continue + if rset.rowcount > 1 and vid in only_once_vids: + continue + views = [view for view in views + if view.category != 'startupview' + and not issubclass(view, notification.NotificationView)] + if views: + try: + view = viewsvreg._select_best(views, req, rset=rset) + if view.linkable(): + yield view + else: + not_selected(self.vreg, view) + # else the view is expected to be used as subview and should + # not be tested directly + except NoSelectableObject: + continue + + def list_actions_for(self, rset): + """returns the list of actions that can be applied on `rset`""" + req = rset.req + for action in self.vreg['actions'].possible_objects(req, rset=rset): + yield action + + def list_boxes_for(self, rset): + """returns the list of boxes that can be applied on `rset`""" + req = rset.req + for box in self.vreg['boxes'].possible_objects(req, rset=rset): + yield box + + def list_startup_views(self): + """returns the list of startup views""" + req = self.request() + for view in self.vreg['views'].possible_views(req, None): + if view.category == 'startupview': + yield view.__regid__ + else: + not_selected(self.vreg, view) + + # web ui testing utilities ################################################# + + @property + @cached + def app(self): + """return a cubicweb publisher""" + publisher = application.CubicWebPublisher(self.config, vreg=self.vreg) + def raise_error_handler(*args, **kwargs): + raise + publisher.error_handler = raise_error_handler + return publisher + + requestcls = fake.FakeRequest + def request(self, *args, **kwargs): + """return a web ui request""" + req = self.requestcls(self.vreg, form=kwargs) + req.set_connection(self.cnx) + return req + + def remote_call(self, fname, *args): + """remote json call simulation""" + dump = simplejson.dumps + args = [dump(arg) for arg in args] + req = self.request(fname=fname, pageid='123', arg=args) + ctrl = self.vreg['controllers'].select('json', req) + return ctrl.publish(), req + + def app_publish(self, req, path='view'): + return self.app.publish(path, req) + + def ctrl_publish(self, req): + """call the publish method of the edit controller""" + ctrl = self.vreg['controllers'].select('edit', req) + try: + result = ctrl.publish() + req.cnx.commit() + except web.Redirect: + req.cnx.commit() + raise + return result + + def expect_redirect(self, callback, req): + """call the given callback with req as argument, expecting to get a + Redirect exception + """ + try: + res = callback(req) + except Redirect, ex: + try: + path, params = ex.location.split('?', 1) + except ValueError: + path = ex.location + params = {} + else: + cleanup = lambda p: (p[0], unquote(p[1])) + params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p) + path = path[len(req.base_url()):] + return path, params + else: + 
self.fail('expected a Redirect exception') + + def expect_redirect_publish(self, req, path='view'): + """call the publish method of the application publisher, expecting to + get a Redirect exception + """ + return self.expect_redirect(lambda x: self.app_publish(x, path), req) + + def init_authentication(self, authmode, anonuser=None): + self.set_option('auth-mode', authmode) + self.set_option('anonymous-user', anonuser) + req = self.request() + origcnx = req.cnx + req.cnx = None + sh = self.app.session_handler + authm = sh.session_manager.authmanager + authm.authinforetreivers[-1].anoninfo = self.vreg.config.anonymous_user() + # not properly cleaned between tests + self.open_sessions = sh.session_manager._sessions = {} + return req, origcnx + + def assertAuthSuccess(self, req, origcnx, nbsessions=1): + sh = self.app.session_handler + path, params = self.expect_redirect(lambda x: self.app.connect(x), req) + cnx = req.cnx + self.assertEquals(len(self.open_sessions), nbsessions, self.open_sessions) + self.assertEquals(cnx.login, origcnx.login) + self.assertEquals(cnx.anonymous_connection, False) + self.assertEquals(path, 'view') + self.assertEquals(params, {'__message': 'welcome %s !' % cnx.user().login}) + + def assertAuthFailure(self, req, nbsessions=0): + self.assertRaises(AuthenticationError, self.app.connect, req) + self.assertEquals(req.cnx, None) + self.assertEquals(len(self.open_sessions), nbsessions) + clear_cache(req, 'get_authorization') + + # content validation ####################################################### # validators are used to validate (XML, DTD, whatever) view's content # validators availables are : @@ -100,8 +553,8 @@ # snippets #'text/html': DTDValidator, #'application/xhtml+xml': DTDValidator, - 'application/xml': SaxOnlyValidator, - 'text/xml': SaxOnlyValidator, + 'application/xml': htmlparser.SaxOnlyValidator, + 'text/xml': htmlparser.SaxOnlyValidator, 'text/plain': None, 'text/comma-separated-values': None, 'text/x-vcard': None, @@ -110,75 +563,9 @@ 'image/png': None, } # maps vid : validator name (override content_type_validators) - vid_validators = dict((vid, VALMAP[valkey]) + vid_validators = dict((vid, htmlparser.VALMAP[valkey]) for vid, valkey in VIEW_VALIDATORS.iteritems()) - no_auto_populate = () - ignored_relations = () - - def custom_populate(self, how_many, cursor): - pass - - def post_populate(self, cursor): - pass - - @nocoverage - def auto_populate(self, how_many): - """this method populates the database with `how_many` entities - of each possible type. 
It also inserts random relations between them - """ - cu = self.cursor() - self.custom_populate(how_many, cu) - vreg = self.vreg - howmanydict = how_many_dict(self.schema, cu, how_many, self.no_auto_populate) - for etype in unprotected_entities(self.schema): - if etype in self.no_auto_populate: - continue - nb = howmanydict.get(etype, how_many) - for rql, args in insert_entity_queries(etype, self.schema, vreg, nb): - cu.execute(rql, args) - edict = {} - for etype in unprotected_entities(self.schema, strict=True): - rset = cu.execute('%s X' % etype) - edict[str(etype)] = set(row[0] for row in rset.rows) - existingrels = {} - ignored_relations = SYSTEM_RELATIONS + self.ignored_relations - for rschema in self.schema.relations(): - if rschema.final or rschema in ignored_relations: - continue - rset = cu.execute('DISTINCT Any X,Y WHERE X %s Y' % rschema) - existingrels.setdefault(rschema.type, set()).update((x, y) for x, y in rset) - q = make_relations_queries(self.schema, edict, cu, ignored_relations, - existingrels=existingrels) - for rql, args in q: - try: - cu.execute(rql, args) - except ValidationError, ex: - # failed to satisfy some constraint - print 'error in automatic db population', ex - self.post_populate(cu) - self.commit() - - @nocoverage - def _check_html(self, output, view, template='main-template'): - """raises an exception if the HTML is invalid""" - try: - validatorclass = self.vid_validators[view.id] - except KeyError: - if view.content_type in ('text/html', 'application/xhtml+xml'): - if template is None: - default_validator = HTMLValidator - else: - default_validator = DTDValidator - else: - default_validator = None - validatorclass = self.content_type_validators.get(view.content_type, - default_validator) - if validatorclass is None: - return None - validator = validatorclass() - return validator.parse_string(output.strip()) - def view(self, vid, rset=None, req=None, template='main-template', **kwargs): @@ -233,9 +620,9 @@ # is not an AssertionError klass, exc, tcbk = sys.exc_info() try: - msg = '[%s in %s] %s' % (klass, view.id, exc) + msg = '[%s in %s] %s' % (klass, view.__regid__, exc) except: - msg = '[%s in %s] undisplayable exception' % (klass, view.id) + msg = '[%s in %s] undisplayable exception' % (klass, view.__regid__) if output is not None: position = getattr(exc, "position", (0,))[0] if position: @@ -252,9 +639,159 @@ raise AssertionError, msg, tcbk + @nocoverage + def _check_html(self, output, view, template='main-template'): + """raises an exception if the HTML is invalid""" + try: + validatorclass = self.vid_validators[view.__regid__] + except KeyError: + if view.content_type in ('text/html', 'application/xhtml+xml'): + if template is None: + default_validator = htmlparser.HTMLValidator + else: + default_validator = htmlparser.DTDValidator + else: + default_validator = None + validatorclass = self.content_type_validators.get(view.content_type, + default_validator) + if validatorclass is None: + return None + validator = validatorclass() + return validator.parse_string(output.strip()) + + # deprecated ############################################################### + + @deprecated('[3.6] use self.request().create_entity(...)') + def add_entity(self, etype, req=None, **kwargs): + if req is None: + req = self.request() + return req.create_entity(etype, **kwargs) + + @deprecated('[3.4] use self.vreg["etypes"].etype_class(etype)(self.request())') + def etype_instance(self, etype, req=None): + req = req or self.request() + e = 
self.vreg['etypes'].etype_class(etype)(req) + e.eid = None + return e + + @nocoverage + @deprecated('[3.4] use req = self.request(); rset = req.execute()') + def rset_and_req(self, rql, optional_args=None, args=None, eidkey=None): + """executes , builds a resultset, and returns a + couple (rset, req) where req is a FakeRequest + """ + return (self.execute(rql, args, eidkey), + self.request(rql=rql, **optional_args or {})) + + +# auto-populating test classes and utilities ################################### + +from cubicweb.devtools.fill import insert_entity_queries, make_relations_queries + +def how_many_dict(schema, cursor, how_many, skip): + """compute how many entities by type we need to be able to satisfy relations + cardinality + """ + # compute how many entities by type we need to be able to satisfy relation constraint + relmap = {} + for rschema in schema.relations(): + if rschema.final: + continue + for subj, obj in rschema.rdefs: + card = rschema.rdef(subj, obj).cardinality + if card[0] in '1?' and len(rschema.subjects(obj)) == 1: + relmap.setdefault((rschema, subj), []).append(str(obj)) + if card[1] in '1?' and len(rschema.objects(subj)) == 1: + relmap.setdefault((rschema, obj), []).append(str(subj)) + unprotected = unprotected_entities(schema) + for etype in skip: + unprotected.add(etype) + howmanydict = {} + for etype in unprotected_entities(schema, strict=True): + howmanydict[str(etype)] = cursor.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0] + if etype in unprotected: + howmanydict[str(etype)] += how_many + for (rschema, etype), targets in relmap.iteritems(): + # XXX should 1. check no cycle 2. propagate changes + relfactor = sum(howmanydict[e] for e in targets) + howmanydict[str(etype)] = max(relfactor, howmanydict[etype]) + return howmanydict + + +class AutoPopulateTest(CubicWebTC): + """base class for test with auto-populating of the database""" + __abstract__ = True + + pdbclass = CubicWebDebugger + # this is a hook to be able to define a list of rql queries + # that are application dependent and cannot be guessed automatically + application_rql = [] + + no_auto_populate = () + ignored_relations = set() + def to_test_etypes(self): return unprotected_entities(self.schema, strict=True) + def custom_populate(self, how_many, cursor): + pass + + def post_populate(self, cursor): + pass + + + @nocoverage + def auto_populate(self, how_many): + """this method populates the database with `how_many` entities + of each possible type. 
It also inserts random relations between them + """ + cu = self.cursor() + self.custom_populate(how_many, cu) + vreg = self.vreg + howmanydict = how_many_dict(self.schema, cu, how_many, self.no_auto_populate) + for etype in unprotected_entities(self.schema): + if etype in self.no_auto_populate: + continue + nb = howmanydict.get(etype, how_many) + for rql, args in insert_entity_queries(etype, self.schema, vreg, nb): + cu.execute(rql, args) + edict = {} + for etype in unprotected_entities(self.schema, strict=True): + rset = cu.execute('%s X' % etype) + edict[str(etype)] = set(row[0] for row in rset.rows) + existingrels = {} + ignored_relations = SYSTEM_RELATIONS | self.ignored_relations + for rschema in self.schema.relations(): + if rschema.final or rschema in ignored_relations: + continue + rset = cu.execute('DISTINCT Any X,Y WHERE X %s Y' % rschema) + existingrels.setdefault(rschema.type, set()).update((x, y) for x, y in rset) + q = make_relations_queries(self.schema, edict, cu, ignored_relations, + existingrels=existingrels) + for rql, args in q: + try: + cu.execute(rql, args) + except ValidationError, ex: + # failed to satisfy some constraint + print 'error in automatic db population', ex + self.post_populate(cu) + self.commit() + + def iter_individual_rsets(self, etypes=None, limit=None): + etypes = etypes or self.to_test_etypes() + for etype in etypes: + if limit: + rql = 'Any X LIMIT %s WHERE X is %s' % (limit, etype) + else: + rql = 'Any X WHERE X is %s' % etype + rset = self.execute(rql) + for row in xrange(len(rset)): + if limit and row > limit: + break + # XXX iirk + rset2 = rset.limit(limit=1, offset=row) + yield rset2 + def iter_automatic_rsets(self, limit=10): """generates basic resultsets for each entity type""" etypes = self.to_test_etypes() @@ -275,54 +812,6 @@ for rql in self.application_rql: yield self.execute(rql) - - def list_views_for(self, rset): - """returns the list of views that can be applied on `rset`""" - req = rset.req - only_once_vids = ('primary', 'secondary', 'text') - req.data['ex'] = ValueError("whatever") - viewsvreg = self.vreg['views'] - for vid, views in viewsvreg.items(): - if vid[0] == '_': - continue - if rset.rowcount > 1 and vid in only_once_vids: - continue - views = [view for view in views - if view.category != 'startupview' - and not issubclass(view, NotificationView)] - if views: - try: - view = viewsvreg.select_best(views, req, rset=rset) - if view.linkable(): - yield view - else: - not_selected(self.vreg, view) - # else the view is expected to be used as subview and should - # not be tested directly - except NoSelectableObject: - continue - - def list_actions_for(self, rset): - """returns the list of actions that can be applied on `rset`""" - req = rset.req - for action in self.vreg['actions'].possible_objects(req, rset=rset): - yield action - - def list_boxes_for(self, rset): - """returns the list of boxes that can be applied on `rset`""" - req = rset.req - for box in self.vreg['boxes'].possible_objects(req, rset=rset): - yield box - - def list_startup_views(self): - """returns the list of startup views""" - req = self.request() - for view in self.vreg['views'].possible_views(req, None): - if view.category == 'startupview': - yield view.id - else: - not_selected(self.vreg, view) - def _test_everything_for(self, rset): """this method tries to find everything that can be tested for `rset` and yields a callable test (as needed in generative tests) @@ -334,24 +823,32 @@ propdefs[k]['default'] = True for view in self.list_views_for(rset): 
backup_rset = rset.copy(rset.rows, rset.description) - yield InnerTest(self._testname(rset, view.id, 'view'), - self.view, view.id, rset, + yield InnerTest(self._testname(rset, view.__regid__, 'view'), + self.view, view.__regid__, rset, rset.req.reset_headers(), 'main-template') # We have to do this because some views modify the # resultset's syntax tree rset = backup_rset for action in self.list_actions_for(rset): - yield InnerTest(self._testname(rset, action.id, 'action'), self._test_action, action) + yield InnerTest(self._testname(rset, action.__regid__, 'action'), self._test_action, action) for box in self.list_boxes_for(rset): - yield InnerTest(self._testname(rset, box.id, 'box'), box.render) + yield InnerTest(self._testname(rset, box.__regid__, 'box'), box.render) @staticmethod def _testname(rset, objid, objtype): return '%s_%s_%s' % ('_'.join(rset.column_types(0)), objid, objtype) -class AutomaticWebTest(WebTest): +# concrete class for automated application testing ############################ + +class AutomaticWebTest(AutoPopulateTest): """import this if you wan automatic tests to be ran""" + def setUp(self): + AutoPopulateTest.setUp(self) + # access to self.app for proper initialization of the authentication + # machinery (else some views may fail) + self.app + ## one each def test_one_each_config(self): self.auto_populate(1) @@ -373,17 +870,7 @@ yield self.view, vid, None, req -class RealDBTest(WebTest): - - def iter_individual_rsets(self, etypes=None, limit=None): - etypes = etypes or unprotected_entities(self.schema, strict=True) - for etype in etypes: - rset = self.execute('Any X WHERE X is %s' % etype) - for row in xrange(len(rset)): - if limit and row > limit: - break - rset2 = rset.limit(limit=1, offset=row) - yield rset2 +# registry instrumentization ################################################### def not_selected(vreg, appobject): try: @@ -391,16 +878,17 @@ except (KeyError, AttributeError): pass + def vreg_instrumentize(testclass): + # XXX broken from cubicweb.devtools.apptest import TestEnvironment - env = testclass._env = TestEnvironment('data', configcls=testclass.configcls, - requestcls=testclass.requestcls) + env = testclass._env = TestEnvironment('data', configcls=testclass.configcls) for reg in env.vreg.values(): reg._selected = {} try: orig_select_best = reg.__class__.__orig_select_best except: - orig_select_best = reg.__class__.select_best + orig_select_best = reg.__class__._select_best def instr_select_best(self, *args, **kwargs): selected = orig_select_best(self, *args, **kwargs) try: @@ -410,9 +898,10 @@ except AttributeError: pass # occurs on reg used to restore database return selected - reg.__class__.select_best = instr_select_best + reg.__class__._select_best = instr_select_best reg.__class__.__orig_select_best = orig_select_best + def print_untested_objects(testclass, skipregs=('hooks', 'etypes')): for regname, reg in testclass._env.vreg.iteritems(): if regname in skipregs: diff -r 15d541321a8c -r 74c1597f8a82 doc/book/en/.templates/layout.html --- a/doc/book/en/.templates/layout.html Wed Jan 20 10:13:02 2010 +0100 +++ b/doc/book/en/.templates/layout.html Wed Jan 20 10:13:45 2010 +0100 @@ -111,6 +111,7 @@ + {%- if use_opensearch %} %s

    ' % entry.view('inblogcontext')) class BlogEntryInBlogView(EntityView): - 'inblogcontext' + id = 'inblogcontext' __select__ = implements('BlogEntry') def cell_call(self, row, col): diff -r 15d541321a8c -r 74c1597f8a82 doc/book/en/development/webstdlib/basetemplates.rst --- a/doc/book/en/development/webstdlib/basetemplates.rst Wed Jan 20 10:13:02 2010 +0100 +++ b/doc/book/en/development/webstdlib/basetemplates.rst Wed Jan 20 10:13:45 2010 +0100 @@ -126,8 +126,8 @@ title=False, message=False) def get_searchbox(self, view, context): - boxes = list(self.vreg.possible_vobjects('boxes', self.req, self.rset, - view=view, context=context)) + boxes = list(self.vreg.poss_visible_objects('boxes', self.req, self.rset, + view=view, context=context)) if boxes: for box in boxes: if box.id == 'search_box': diff -r 15d541321a8c -r 74c1597f8a82 doc/book/en/intro/tutorial/blog-in-five-minutes.rst --- a/doc/book/en/intro/tutorial/blog-in-five-minutes.rst Wed Jan 20 10:13:02 2010 +0100 +++ b/doc/book/en/intro/tutorial/blog-in-five-minutes.rst Wed Jan 20 10:13:45 2010 +0100 @@ -17,9 +17,24 @@ cubicweb-ctl start -D myblog -This is it. Your blog is running. Visit http://localhost:8080 and enjoy it! +The -D option is the debugging mode of cubicweb; removing it will launch the instance in the background. + +Permission +~~~~~~~~~~ + +This command assumes that you have root access to the /etc/ path. In order to initialize your instance as a `user` (from scratch), please check your current PYTHONPATH, then create the ~/etc/cubicweb.d directory. + +Instance parameters +~~~~~~~~~~~~~~~~~~~ -As a developer, you'll want to know more about developing new cubes and -customizing the look of your instance. This is what the next section is about. +If the database installation failed, you may want to change some instance parameters, for example the database host or the user name. This information can be edited in the `source` file located in the /etc/cubicweb.d/myblog directory. + +Then relaunch the database creation: + + cubicweb-ctl db-create myblog + +Other parameters, such as web server or email parameters, can be modified in the `all-in-one.conf` file. + +This is it. Your blog is running. Visit http://localhost:8080 and enjoy it! This blog is fully functional. The next section will present how to develop new cubes and customize the look of your instance. diff -r 15d541321a8c -r 74c1597f8a82 doc/book/en/intro/tutorial/create-cube.rst --- a/doc/book/en/intro/tutorial/create-cube.rst Wed Jan 20 10:13:02 2010 +0100 +++ b/doc/book/en/intro/tutorial/create-cube.rst Wed Jan 20 10:13:45 2010 +0100 @@ -28,6 +28,8 @@ :: + from yams.buildobjs import EntityType, String, SubjectRelation, Date + class Blog(EntityType): title = String(maxsize=50, required=True) description = String() @@ -38,6 +40,8 @@ content = String(required=True, fulltextindexed=True) entry_of = SubjectRelation('Blog', cardinality='?*') +The first step is the import of EntityType (generic class for entity) and +attributes that will be used in both Blog and BlogEntry entities. A Blog has a title and a description. The title is a string that is required and must be less than 50 characters. The @@ -64,8 +68,8 @@ cubicweb-ctl create blog blogdemo +This command will create the corresponding database and initialize it. -This command will create the corresponding database and initialize it. 
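For readers following this tutorial, here is a minimal sketch (not part of this changeset) of how the ``CubicWebTC`` base class added above in ``devtools/testlib.py`` could exercise the Blog/BlogEntry schema; the module name ``test/unittest_blog.py``, the class name and the sample data are assumptions made for illustration::

    from cubicweb.devtools.testlib import CubicWebTC

    class BlogTC(CubicWebTC):
        """exercise the Blog / BlogEntry schema defined above"""

        def setup_database(self):
            # called by CubicWebTC.setUp to populate the fresh test database
            req = self.request()
            self.blog = req.create_entity('Blog', title=u'Tech-blog',
                                          description=u'everything about technology')

        def test_entry_of(self):
            req = self.request()
            entry = req.create_entity('BlogEntry', title=u'hello',
                                      content=u'world', entry_of=self.blog)
            # the entry should now be linked to the blog created in setup_database
            self.assertEquals(entry.entry_of[0].eid, self.blog.eid)

    if __name__ == '__main__':
        from logilab.common.testlib import unittest_main
        unittest_main()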
Welcome to your web instance ------------------------------- diff -r 15d541321a8c -r 74c1597f8a82 doc/book/fr/01-introduction.fr.txt --- a/doc/book/fr/01-introduction.fr.txt Wed Jan 20 10:13:02 2010 +0100 +++ b/doc/book/fr/01-introduction.fr.txt Wed Jan 20 10:13:45 2010 +0100 @@ -6,13 +6,13 @@ =========================== `CubicWeb` nous permet de développer des instances d'applications web -basées sur un ou plusieurs `cube`. +basées sur un ou plusieurs `cube`. -Ce à quoi nous réferrons en parlant de `cube` est un modèle définissant -des types de données et des vues. Un `cube` est un composant re-utilisable +Ce à quoi nous réferrons en parlant de `cube` est un modèle définissant +des types de données et des vues. Un `cube` est un composant re-utilisable regroupé avec d'autres cubes sur le système de fichiers. -Un `instance` réferre à une installation spécifique d'un ou plusieurs cubes +Un `instance` réferre à une installation spécifique d'un ou plusieurs cubes où sont regroupés tous les fichiers de configuration de l'application web finale. Dans ce document, nous allons vous montrer comment créer un cube et l'utiliser @@ -32,7 +32,7 @@ | |-- data/ | |-- cubes.blog.css - | |-- cubes.blog.js + | |-- cubes.blog.js | |-- external_resources | |-- debian/ @@ -77,7 +77,7 @@ |-- views.py Toute modification apportée à votre modele de données devra -etre effectué dans ce répertoire. +etre effectué dans ce répertoire. @@ -102,17 +102,17 @@ title = String(required=True, fulltextindexed=True, maxsize=256) publish_date = Date(default='TODAY') content = String(required=True, fulltextindexed=True) - entry_of = SubjectRelation('Blog', cardinality='?*') + entry_of = SubjectRelation('Blog', cardinality='?*') -Un ``Blog`` a un titre et une description. Le titre est une chaîne +Un ``Blog`` a un titre et une description. Le titre est une chaîne de caractères requise par la classe parente EntityType et ne doit -pas excéder 50 caractères. La description est une chaîne de +pas excéder 50 caractères. La description est une chaîne de caractères sans contraintes. Une ``BlogEntry`` a un titre, une date de publication et du texte -étant son contenu. Le titre est une chaîne de caractères qui ne +étant son contenu. Le titre est une chaîne de caractères qui ne doit pas excéder 100 caractères. La date de publication est de type Date et a -pour valeur par défaut TODAY, ce qui signifie que lorsqu'une +pour valeur par défaut TODAY, ce qui signifie que lorsqu'une ``BlogEntry`` sera créée, sa date de publication sera la date courante a moins de modifier ce champ. Le texte est une chaîne de caractères qui sera indexée en plein texte et sans contraintes. @@ -121,7 +121,7 @@ relie à un ``Blog``. La cardinalité ``?*`` signifie que BlogEntry peut faire partie de zero a un Blog (``?`` signifie `zero ou un`) et qu'un Blog peut avoir une infinité de BlogEntry (``*`` signifie -`n'importe quel nombre incluant zero`). +`n'importe quel nombre incluant zero`). Par soucis de complétude, nous rappellerons que ``+`` signifie `un ou plus`. @@ -130,7 +130,7 @@ -------------------- :: - + cubicweb-ctl create blog blogdemo Cette commande va créer un répertoire ``~/etc/cubicweb.d/blogdemo`` @@ -150,7 +150,7 @@ Vous pouvez à présent accéder à votre application web vous permettant de créer des blogs et d'y poster des messages en visitant l'URL http://localhost:8080/. Un premier formulaire d'authentification va vous être proposé. 
Par défaut, -l'application n'autorisera pas d'utilisateur anonyme à accéder a votre +l'application n'autorisera pas d'utilisateur anonyme à accéder a votre application. Vous devrez donc utiliser l'utilisateur administrateur que vous aurez crée lors de l'initialisation de votre base de données via ``cubicweb-ctl create``. @@ -166,7 +166,7 @@ Rappelez-vous que pour le moment, tout a été géré par la plate-forme `CubicWeb` et que la seule chose qui a été fournie est le schéma de -données. +données. Créons des entités ------------------ @@ -186,7 +186,7 @@ :alt: from to create blog En cliquant sur le logo situé dans le coin gauche de la fenêtre, -vous allez être redirigé vers la page d'accueil. Ensuite, si vous allez +vous allez être redirigé vers la page d'accueil. Ensuite, si vous allez sur le lien Blog, vous devriez voir la liste des entités Blog, en particulier celui que vous venez juste de créer ``Tech-Blog``. @@ -212,7 +212,7 @@ un peut de texte avant de ``Valider``. Vous venez d'ajouter un article sans avoir précisé à quel Blog il appartenait. Dans la colonne de gauche se trouve une boite intitulé ``actions``, cliquez sur le menu ``modifier``. -Vous êtes de retour sur le formulaire d'édition de l'article que vous +Vous êtes de retour sur le formulaire d'édition de l'article que vous venez de créer, à ceci près que ce formulaire a maintenant une nouvelle section intitulée ``ajouter relation``. Choisissez ``entry_of`` dans ce menu, cela va faire apparaitre une deuxième menu déroulant dans lequel vous @@ -225,7 +225,7 @@ :alt: editing a blog entry to add a relation to a blog Validez vos modifications en cliquant sur ``Valider``. L'entité article -qui est listée contient maintenant un lien vers le Blog auquel il +qui est listée contient maintenant un lien vers le Blog auquel il appartient, ``MyLife``. .. image:: images/cbw-detail-one-blogentry.fr.png @@ -248,7 +248,7 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Une vue est une classe Python qui inclut: - + - un identifiant (tous les objets dans `CubicWeb` sont listés dans un registre et cet identifiant est utilisé comme la clé) @@ -262,32 +262,32 @@ des entités que nous cherchons à appliquer. `CubicWeb` utilise un sélecteur qui permet de calculer un score et d'identifier la vue la plus adaptée au `result set` que nous voulons afficher. La librarie -standard des sélecteurs se trouve dans ``cubicweb.common.selector`` -et une librairie des méthodes utilisées pour calculer les scores +standard des sélecteurs se trouve dans ``cubicweb.common.selector`` +et une librairie des méthodes utilisées pour calculer les scores est dans ``cubicweb.vregistry.vreq``. Il est possible de définir plusieurs vues ayant le meme identifiant -et d'y associer des sélecteurs et des filtres afin de permettre à -l'application de s'adapter au mieux aux données que nous avons +et d'y associer des sélecteurs et des filtres afin de permettre à +l'application de s'adapter au mieux aux données que nous avons besoin d'afficher. Nous verrons cela plus en détails dans :ref:`DefinitionVues`. On peut citer l'exemple de la vue nommée ``primary`` qui est celle utilisée -pour visualiser une entité seule. Nous allons vous montrer comment modifier +pour visualiser une entité seule. Nous allons vous montrer comment modifier cette vue. 
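To make the registry/selector mechanism described above concrete, here is a minimal sketch (not part of this changeset) of a view declared with the ``__regid__`` / ``__select__`` API that this patch migrates the code base to; the class name and the ``teaser`` identifier are invented for the example::

    from cubicweb.selectors import implements
    from cubicweb.view import EntityView

    class BlogEntryTeaserView(EntityView):
        __regid__ = 'teaser'                  # key under which the view is registered
        __select__ = implements('BlogEntry')  # selector: only scores on BlogEntry result sets

        def cell_call(self, row, col):
            entity = self.rset.get_entity(row, col)
            self.w(u'<h3>%s</h3>' % entity.title)

Several views may share the same ``__regid__``; the selector scores then decide which implementation is applied to a given result set.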
Modification des vues ~~~~~~~~~~~~~~~~~~~~~ Si vous souhaitez modifier la manière dont est rendue un article (`Blogentry`), -vous devez surcharger la vue ``primary`` définie dans le module ``views`` de +vous devez surcharger la vue ``primary`` définie dans le module ``views`` de votre cube, ``cubes/blog/views.py``. Nous pourrions par exemple ajouter devant la date de publication un préfixe indiquant que la date visualisée est la date de publication. -Pour cela appliquez les modifications suivantes: +Pour cela appliquez les modifications suivantes: -:: +:: from cubicweb.web.views import baseviews @@ -303,13 +303,13 @@ return entity.view('reledit', rtype='content_format') def cell_call(self, row, col): - entity = self.entity(row, col) + entity = self.rset.get_entity(row, col) # display entity attributes with prefixes self.w(u'

<h1>%s</h1>' % entity.title)
    self.w(u'<p>published on %s</p>' % entity.publish_date.strftime('%Y-%m-%d'))
    self.w(u'<p>%s</p>
    ' % entity.content) - + # display relations siderelations = [] if self.main_related_section: @@ -329,10 +329,10 @@ -Le code que nous avons modifié définit une vue primaire pour une entité de -type `BlogEntry`. +Le code que nous avons modifié définit une vue primaire pour une entité de +type `BlogEntry`. -Etant donné que les vues sont appliquées sur des `result sets` et que +Etant donné que les vues sont appliquées sur des `result sets` et que les `result sets` peuvent être des tableaux de données, il est indispensable de récupérer l'entité selon ses coordonnées (row,col). diff -r 15d541321a8c -r 74c1597f8a82 doc/book/fr/05-define-views.fr.txt --- a/doc/book/fr/05-define-views.fr.txt Wed Jan 20 10:13:02 2010 +0100 +++ b/doc/book/fr/05-define-views.fr.txt Wed Jan 20 10:13:45 2010 +0100 @@ -20,17 +20,17 @@ * `cell_call(row, col, **kwargs)`, appelle la vue pour une cellule donnée d'un result set * `url()`, retourne l'url permettant d'obtenir cette vue avec le result set en - cours + cours * `view(__vid, rset, __fallback_vid=None, **kwargs)`, appelle la vue d'identificant `__vid` sur le result set donné. Il est possible de données un identificant de vue de "fallback" qui sera utilisé si la vue demandée n'est pas applicable au result set - + * `wview(__vid, rset, __fallback_vid=None, **kwargs)`, pareil que `view` mais passe automatiquement le flux en argument - + * `html_headers()`, retourne une liste d'en-tête HTML à placer par le template - principal + principal * `page_title()`, retourne le titre à utiliser dans l'en tête HTML `title` @@ -55,7 +55,7 @@ [FROM-LAX-BOOK] -Tip: when modifying views, you do not need to restart the local +Tip: when modifying views, you do not need to restart the local server. Just save the file in your editor and reload the page in your browser to see the changes. @@ -63,7 +63,7 @@ - an identifier (all objects in `LAX` are entered in a registry and this identifier will be used as a key) - + - a filter to select the resulsets it can be applied to `LAX` provides a lot of standard views, for a complete list, you @@ -83,14 +83,14 @@ 05. accepts = ('BlogEntry',) 06. 07. def cell_call(self, row, col): - 08. entity = self.entity(row, col) + 08. entity = self.rset.get_entity(row, col) 09. self.w(u'

<h1>%s</h1>' % entity.title)
 10.     self.w(u'<p>published on %s in category %s</p>' % \
 11.         (entity.publish_date.strftime('%Y-%m-%d'), entity.category))
 12.     self.w(u'<p>%s</p>
    ' % entity.text) The above source code defines a new primary view (`line 03`) for -``BlogEntry`` (`line 05`). +``BlogEntry`` (`line 05`). Since views are applied to resultsets and resulsets can be tables of data, it is needed to recover the entity from its (row,col) @@ -108,11 +108,11 @@ Let us now improve the primary view of a blog :: 01. class BlogPrimaryView(baseviews.PrimaryView): - 02. + 02. 03. accepts = ('Blog',) 04. 05. def cell_call(self, row, col): - 06. entity = self.entity(row, col) + 06. entity = self.rset.get_entity(row, col) 07. self.w(u'

<h1>%s</h1>' % entity.title)
 08.     self.w(u'<p>%s</p>
    ' % entity.description) 09. rset = self.req.execute('Any E WHERE E entry_of B, B eid "%s"' % entity.eid) @@ -127,9 +127,9 @@ about the schema and infer that such entities have to be of the ``BlogEntry`` kind and retrieves them. -The request returns a selection of data called a resultset. At +The request returns a selection of data called a resultset. At `line 10` the view 'primary' is applied to this resultset to output -HTML. +HTML. **This is to be compared to interfaces and protocols in object-oriented languages. Applying a given view to all the entities of a resultset only @@ -159,7 +159,7 @@ * create view "blogentry table" with title, publish_date, category -We will show that by default the view that displays +We will show that by default the view that displays "Any E,D,C WHERE E publish_date D, E category C" is the table view. Of course, the same can be obtained by calling self.wview('table',rset) @@ -244,4 +244,4 @@ ---------------------------------- Certains navigateurs (dont firefox) n'aime pas les `
<div>` vides (par vide j'entend sans contenu dans la balise, il peut y avoir des attributs), faut
-toujours mettre `<div></div>` même s'il n'y a rien dedans, et non `<div/>`.
+toujours mettre `<div></div>` même s'il n'y a rien dedans, et non `<div/>
    `. diff -r 15d541321a8c -r 74c1597f8a82 doc/book/fr/20-01-intro.fr.txt --- a/doc/book/fr/20-01-intro.fr.txt Wed Jan 20 10:13:02 2010 +0100 +++ b/doc/book/fr/20-01-intro.fr.txt Wed Jan 20 10:13:45 2010 +0100 @@ -37,7 +37,7 @@ données manipulées. La syntaxe de la définition est la même que celle proposée par `Google AppEngine` mais il faut remplacer la ligne d'import:: - + from google.appengine.ext import db par celle-ci:: @@ -48,7 +48,7 @@ Un exemple de schéma de données pour un ``Blog`` pourrait être:: from ginco.goa import db - + class BlogEntry(db.Model): # un titre à donner à l'entrée title = db.StringProperty(required=True) @@ -57,8 +57,8 @@ # le contenu de l'entrée content = db.TextProperty() # une entrée peut en citer une autre - cites = db.SelfReferenceProperty() - + cites = db.SelfReferenceProperty() + Personnalisation des vues ------------------------- @@ -75,7 +75,7 @@ - un identifiant (tous les objets dans `LAX` sont enregistrés dans un registre et cet identifiant sert de clé pour y retrouver la vue) - + - une description des types de données auxquels elle s'applique Il existe dans `LAX` des vues prédéfinies et utilisées par le moteur @@ -87,17 +87,17 @@ Par exemple, si on souhaite modifier la page principale d'une entrée de blog, il faut surcharger la vue ``primary`` des objets ``BlogEntry`` dans le fichier ``myapp/views.py``:: - + from ginco.web.views import baseviews - + class BlogEntryPrimaryView(baseviews.PrimaryView): accepts = ('BlogEntry',) - + def cell_call(self, row, col): - entity = self.entity(row, col) + entity = self.rset.get_entity(row, col) self.w(u'

diff -r 15d541321a8c -r 74c1597f8a82 doc/book/fr/20-01-intro.fr.txt
--- a/doc/book/fr/20-01-intro.fr.txt Wed Jan 20 10:13:02 2010 +0100
+++ b/doc/book/fr/20-01-intro.fr.txt Wed Jan 20 10:13:45 2010 +0100
@@ -37,7 +37,7 @@
 données manipulées. La syntaxe de la définition est la même que celle
 proposée par `Google AppEngine` mais il faut remplacer la ligne d'import::
-
+
   from google.appengine.ext import db

 par celle-ci::

@@ -48,7 +48,7 @@
 Un exemple de schéma de données pour un ``Blog`` pourrait être::

   from ginco.goa import db
-
+
   class BlogEntry(db.Model):
       # un titre à donner à l'entrée
       title = db.StringProperty(required=True)
@@ -57,8 +57,8 @@
       # le contenu de l'entrée
       content = db.TextProperty()
       # une entrée peut en citer une autre
-      cites = db.SelfReferenceProperty()
-
+      cites = db.SelfReferenceProperty()
+
 Personnalisation des vues
 -------------------------
@@ -75,7 +75,7 @@
 - un identifiant (tous les objets dans `LAX` sont enregistrés dans un
   registre et cet identifiant sert de clé pour y retrouver la vue)
-
+
 - une description des types de données auxquels elle s'applique

 Il existe dans `LAX` des vues prédéfinies et utilisées par le moteur
@@ -87,17 +87,17 @@
 Par exemple, si on souhaite modifier la page principale d'une entrée de blog,
 il faut surcharger la vue ``primary`` des objets ``BlogEntry`` dans le
 fichier ``myapp/views.py``::
-
+
   from ginco.web.views import baseviews
-
+
   class BlogEntryPrimaryView(baseviews.PrimaryView):
       accepts = ('BlogEntry',)
-
+
       def cell_call(self, row, col):
-          entity = self.entity(row, col)
+          entity = self.rset.get_entity(row, col)
          self.w(u'<h1>%s</h1>' % entity.title)
          self.w(u'<p>%s</p>' % entity.content)
-
+
 Génération du graphique de schéma
 ---------------------------------
@@ -105,13 +105,13 @@
 Il existe une vue ``schema`` qui permet d'afficher un graphique
 représantant les différents types d'entités définis dans le schéma
 ainsi que les relations entre ces types. Ce graphique doit être généré
-statiquement. Le script à utiliser pour générer ce schéma est
+statiquement. Le script à utiliser pour générer ce schéma est
 dans ``myapp/tools``.

 Ce script nécessite d'avoir accès aux bibliothèques fournies par le
 SDK de ``Google AppEngine``. Il faut donc modifier son PYTHONPATH::

   $ export PYTHONPATH=GAE_ROOT/google:GAE_ROOT/lib/yaml
-  $ python tools/generate_schema_img.py
+  $ python tools/generate_schema_img.py

 Génération des fichiers de traduction

diff -r 15d541321a8c -r 74c1597f8a82 doc/book/fr/20-04-develop-views.fr.txt
--- a/doc/book/fr/20-04-develop-views.fr.txt Wed Jan 20 10:13:02 2010 +0100
+++ b/doc/book/fr/20-04-develop-views.fr.txt Wed Jan 20 10:13:45 2010 +0100
@@ -13,7 +13,7 @@
 - an identifier (all objects in `LAX` are entered in a registry
   and this identifier will be used as a key)
-
+
 - a filter to select the resultsets it can be applied to

 `LAX` provides a lot of standard views, for a complete list, you
@@ -25,13 +25,13 @@
 override the view ``primary`` in ``BlogDemo/views.py`` ::

   from ginco.web.views import baseviews
-
+
   class BlogEntryPrimaryView(baseviews.PrimaryView):

       accepts = ('BlogEntry',)
-
+
       def cell_call(self, row, col):
-          entity = self.entity(row, col)
+          entity = self.rset.get_entity(row, col)
           self.w(u'<h1>%s</h1>' % entity.title)
           self.w(u'<p>%s</p>' % entity.publish_date)
           self.w(u'<p>%s</p>
    ' % entity.category) @@ -91,7 +91,7 @@ [WRITE ME] -* show how urls are mapped to selections and views and explain URLRewriting +* show how urls are mapped to selections and views and explain URLRewriting Security ========= diff -r 15d541321a8c -r 74c1597f8a82 entities/__init__.py --- a/entities/__init__.py Wed Jan 20 10:13:02 2010 +0100 +++ b/entities/__init__.py Wed Jan 20 10:13:45 2010 +0100 @@ -22,7 +22,7 @@ """an entity instance has e_schema automagically set on the class and instances have access to their issuing cursor """ - id = 'Any' + __regid__ = 'Any' __implements__ = (IBreadCrumbs, IFeed) fetch_attrs = ('modification_date',) @@ -82,21 +82,20 @@ def dc_date(self, date_format=None):# XXX default to ISO 8601 ? """return latest modification date of this entity""" - return self.format_date(self.modification_date, date_format=date_format) + return self._cw.format_date(self.modification_date, date_format=date_format) def dc_type(self, form=''): """return the display name for the type of this entity (translated)""" - return self.e_schema.display_name(self.req, form) + return self.e_schema.display_name(self._cw, form) def dc_language(self): """return language used by this entity (translated)""" # check if entities has internationalizable attributes # XXX one is enough or check if all String attributes are internationalizable? for rschema, attrschema in self.e_schema.attribute_definitions(): - if rschema.rproperty(self.e_schema, attrschema, - 'internationalizable'): - return self.req._(self.req.user.property_value('ui.language')) - return self.req._(self.vreg.property_value('ui.language')) + if rschema.rdef(self.e_schema, attrschema).internationalizable: + return self._cw._(self._cw.user.property_value('ui.language')) + return self._cw._(self._cw.vreg.property_value('ui.language')) @property def creator(self): @@ -122,11 +121,11 @@ path = parent.breadcrumbs(view) + [self] if not recurs: if view is None: - if 'vtitle' in self.req.form: + if 'vtitle' in self._cw.form: # embeding for instance - path.append( self.req.form['vtitle'] ) - elif view.id != 'primary' and hasattr(view, 'title'): - path.append( self.req._(view.title) ) + path.append( self._cw.form['vtitle'] ) + elif view.__regid__ != 'primary' and hasattr(view, 'title'): + path.append( self._cw._(view.title) ) return path ## IFeed interface ######################################################## @@ -163,7 +162,7 @@ self.__linkto = {} except KeyError: pass - linktos = list(self.req.list_form_param('__linkto')) + linktos = list(self._cw.list_form_param('__linkto')) linkedto = [] for linkto in linktos[:]: ltrtype, eid, ltrole = linkto.split(':') @@ -172,7 +171,7 @@ # hidden input if remove: linktos.remove(linkto) - self.req.form['__linkto'] = linktos + self._cw.form['__linkto'] = linktos linkedto.append(typed_eid(eid)) self.__linkto[(rtype, role)] = linkedto return linkedto @@ -206,85 +205,6 @@ """ return () - # XXX deprecates, may be killed once old widgets system is gone ########### - - @classmethod - def get_widget(cls, rschema, x='subject'): - """return a widget to view or edit a relation - - notice that when the relation support multiple target types, the widget - is necessarily the same for all those types - """ - # let ImportError propage if web par isn't available - from cubicweb.web.widgets import widget - if isinstance(rschema, basestring): - rschema = cls.schema.rschema(rschema) - if x == 'subject': - tschema = rschema.objects(cls.e_schema)[0] - wdg = widget(cls.vreg, cls, rschema, tschema, 'subject') - else: - tschema = 
rschema.subjects(cls.e_schema)[0] - wdg = widget(cls.vreg, tschema, rschema, cls, 'object') - return wdg - - @deprecated('use EntityFieldsForm.subject_relation_vocabulary') - def subject_relation_vocabulary(self, rtype, limit): - form = self.vreg.select('forms', 'edition', self.req, entity=self) - return form.subject_relation_vocabulary(rtype, limit) - - @deprecated('use EntityFieldsForm.object_relation_vocabulary') - def object_relation_vocabulary(self, rtype, limit): - form = self.vreg.select('forms', 'edition', self.req, entity=self) - return form.object_relation_vocabulary(rtype, limit) - - @deprecated('use AutomaticEntityForm.[e]relations_by_category') - def relations_by_category(self, categories=None, permission=None): - from cubicweb.web.views.autoform import AutomaticEntityForm - return AutomaticEntityForm.erelations_by_category(self, categories, permission) - - @deprecated('use AutomaticEntityForm.[e]srelations_by_category') - def srelations_by_category(self, categories=None, permission=None): - from cubicweb.web.views.autoform import AutomaticEntityForm - return AutomaticEntityForm.esrelations_by_category(self, categories, permission) - - def attribute_values(self, attrname): - if self.has_eid() or attrname in self: - try: - values = self[attrname] - except KeyError: - values = getattr(self, attrname) - # actual relation return a list of entities - if isinstance(values, list): - return [v.eid for v in values] - return (values,) - # the entity is being created, try to find default value for - # this attribute - try: - values = self.req.form[attrname] - except KeyError: - try: - values = self[attrname] # copying - except KeyError: - values = getattr(self, 'default_%s' % attrname, - self.e_schema.default(attrname)) - if callable(values): - values = values() - if values is None: - values = () - elif not isinstance(values, (list, tuple)): - values = (values,) - return values - - def use_fckeditor(self, attr): - """return True if fckeditor should be used to edit entity's attribute named - `attr`, according to user preferences - """ - if self.req.use_fckeditor() and self.e_schema.has_metadata(attr, 'format'): - if self.has_eid() or '%s_format' % attr in self: - return self.attr_metadata(attr, 'format') == 'text/html' - return self.req.property_value('ui.default-text-format') == 'text/html' - return False - # XXX: store a reference to the AnyEntity class since it is hijacked in goa # configuration and we need the actual reference to avoid infinite loops # in mro diff -r 15d541321a8c -r 74c1597f8a82 entities/authobjs.py --- a/entities/authobjs.py Wed Jan 20 10:13:02 2010 +0100 +++ b/entities/authobjs.py Wed Jan 20 10:13:45 2010 +0100 @@ -13,7 +13,7 @@ from cubicweb.entities import AnyEntity, fetch_config class CWGroup(AnyEntity): - id = 'CWGroup' + __regid__ = 'CWGroup' fetch_attrs, fetch_order = fetch_config(['name']) fetch_unrelated_order = fetch_order @@ -23,7 +23,7 @@ class CWUser(AnyEntity): - id = 'CWUser' + __regid__ = 'CWUser' fetch_attrs, fetch_order = fetch_config(['login', 'firstname', 'surname']) fetch_unrelated_order = fetch_order @@ -60,12 +60,13 @@ try: # properties stored on the user aren't correctly typed # (e.g. 
all values are unicode string) - return self.vreg.typed_value(key, self.properties[key]) + return self._cw.vreg.typed_value(key, self.properties[key]) except KeyError: pass except ValueError: - self.warning('incorrect value for eproperty %s of user %s', key, self.login) - return self.vreg.property_value(key) + self.warning('incorrect value for eproperty %s of user %s', + key, self.login) + return self._cw.vreg.property_value(key) def matching_groups(self, groups): """return the number of the given group(s) in which the user is @@ -92,12 +93,12 @@ return self.groups == frozenset(('guests', )) def owns(self, eid): - if hasattr(self.req, 'unsafe_execute'): + if hasattr(self._cw, 'unsafe_execute'): # use unsafe_execute on the repository side, in case # session's user doesn't have access to CWUser - execute = self.req.unsafe_execute + execute = self._cw.unsafe_execute else: - execute = self.req.execute + execute = self._cw.execute try: return execute('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s', {'x': eid, 'u': self.eid}, 'x') @@ -115,7 +116,7 @@ kwargs['x'] = contexteid cachekey = 'x' try: - return self.req.execute(rql, kwargs, cachekey) + return self._cw.execute(rql, kwargs, cachekey) except Unauthorized: return False @@ -125,7 +126,7 @@ """construct a name using firstname / surname or login if not defined""" if self.firstname and self.surname: - return self.req._('%(firstname)s %(surname)s') % { + return self._cw._('%(firstname)s %(surname)s') % { 'firstname': self.firstname, 'surname' : self.surname} if self.firstname: return self.firstname diff -r 15d541321a8c -r 74c1597f8a82 entities/lib.py --- a/entities/lib.py Wed Jan 20 10:13:02 2010 +0100 +++ b/entities/lib.py Wed Jan 20 10:13:45 2010 +0100 @@ -24,7 +24,7 @@ return '%s at %s' % (name, host.replace('.', ' dot ')) class EmailAddress(AnyEntity): - id = 'EmailAddress' + __regid__ = 'EmailAddress' fetch_attrs, fetch_order = fetch_config(['address', 'alias']) def dc_title(self): @@ -51,7 +51,7 @@ if not ('sender' in subjrels and 'recipients' in subjrels): return rql = 'DISTINCT Any X, S, D ORDERBY D DESC WHERE X sender Y or X recipients Y, X subject S, X date D, Y eid %(y)s' - rset = self.req.execute(rql, {'y': self.eid}, 'y') + rset = self._cw.execute(rql, {'y': self.eid}, 'y') if skipeids is None: skipeids = set() for i in xrange(len(rset)): @@ -62,7 +62,7 @@ yield rset.get_entity(i, 0) def display_address(self): - if self.vreg.config['mangle-emails']: + if self._cw.vreg.config['mangle-emails']: return mangle_email(self.address) return self.address @@ -82,23 +82,38 @@ return super(EmailAddress, self).after_deletion_path() -from logilab.common.deprecation import class_renamed -Emailaddress = class_renamed('Emailaddress', EmailAddress) -Emailaddress.id = 'Emailaddress' +class Bookmark(AnyEntity): + """customized class for Bookmark entities""" + __regid__ = 'Bookmark' + fetch_attrs, fetch_order = fetch_config(['title', 'path']) + + def actual_url(self): + url = self._cw.build_url(self.path) + if self.title: + urlparts = list(urlsplit(url)) + if urlparts[3]: + urlparts[3] += '&vtitle=%s' % self._cw.url_quote(self.title) + else: + urlparts[3] = 'vtitle=%s' % self._cw.url_quote(self.title) + url = urlunsplit(urlparts) + return url + + def action_url(self): + return self.absolute_url() + '/follow' class CWProperty(AnyEntity): - id = 'CWProperty' + __regid__ = 'CWProperty' fetch_attrs, fetch_order = fetch_config(['pkey', 'value']) rest_attr = 'pkey' def typed_value(self): - return self.vreg.typed_value(self.pkey, self.value) + return 
self._cw.vreg.typed_value(self.pkey, self.value) def dc_description(self, format='text/plain'): try: - return self.req._(self.vreg.property_info(self.pkey)['help']) + return self._cw._(self._cw.vreg.property_info(self.pkey)['help']) except UnknownProperty: return u'' @@ -109,33 +124,13 @@ return 'view', {} -class Bookmark(AnyEntity): - """customized class for Bookmark entities""" - id = 'Bookmark' - fetch_attrs, fetch_order = fetch_config(['title', 'path']) - - def actual_url(self): - url = self.req.build_url(self.path) - if self.title: - urlparts = list(urlsplit(url)) - if urlparts[3]: - urlparts[3] += '&vtitle=%s' % self.req.url_quote(self.title) - else: - urlparts[3] = 'vtitle=%s' % self.req.url_quote(self.title) - url = urlunsplit(urlparts) - return url - - def action_url(self): - return self.absolute_url() + '/follow' - - class CWCache(AnyEntity): """Cache""" - id = 'CWCache' + __regid__ = 'CWCache' fetch_attrs, fetch_order = fetch_config(['name']) def touch(self): - self.req.execute('SET X timestamp %(t)s WHERE X eid %(x)s', + self._cw.execute('SET X timestamp %(t)s WHERE X eid %(x)s', {'t': datetime.now(), 'x': self.eid}, 'x') def valid(self, date): diff -r 15d541321a8c -r 74c1597f8a82 entities/schemaobjs.py --- a/entities/schemaobjs.py Wed Jan 20 10:13:02 2010 +0100 +++ b/entities/schemaobjs.py Wed Jan 20 10:13:45 2010 +0100 @@ -16,15 +16,15 @@ class CWEType(AnyEntity): - id = 'CWEType' + __regid__ = 'CWEType' fetch_attrs, fetch_order = fetch_config(['name']) def dc_title(self): - return u'%s (%s)' % (self.name, self.req._(self.name)) + return u'%s (%s)' % (self.name, self._cw._(self.name)) def dc_long_title(self): stereotypes = [] - _ = self.req._ + _ = self._cw._ if self.final: stereotypes.append(_('final')) if stereotypes: @@ -37,15 +37,15 @@ class CWRType(AnyEntity): - id = 'CWRType' + __regid__ = 'CWRType' fetch_attrs, fetch_order = fetch_config(['name']) def dc_title(self): - return u'%s (%s)' % (self.name, self.req._(self.name)) + return u'%s (%s)' % (self.name, self._cw._(self.name)) def dc_long_title(self): stereotypes = [] - _ = self.req._ + _ = self._cw._ if self.symetric: stereotypes.append(_('symetric')) if self.inlined: @@ -63,7 +63,7 @@ * raise ValidationError if inlining is'nt possible * eventually return True """ - rschema = self.schema.rschema(self.name) + rschema = self._cw.vreg.schema.rschema(self.name) if inlined == rschema.inlined: return False if inlined: @@ -75,7 +75,7 @@ rtype = self.name stype = rdef.stype otype = rdef.otype - msg = self.req._("can't set inlined=%(inlined)s, " + msg = self._cw._("can't set inlined=%(inlined)s, " "%(stype)s %(rtype)s %(otype)s " "has cardinality=%(card)s") raise ValidationError(self.eid, {'inlined': msg % locals()}) @@ -87,7 +87,7 @@ class CWRelation(AnyEntity): - id = 'CWRelation' + __regid__ = 'CWRelation' fetch_attrs = fetch_config(['cardinality'])[0] def dc_title(self): @@ -130,7 +130,7 @@ class CWAttribute(CWRelation): - id = 'CWAttribute' + __regid__ = 'CWAttribute' def dc_long_title(self): card = self.cardinality @@ -144,7 +144,7 @@ class CWConstraint(AnyEntity): - id = 'CWConstraint' + __regid__ = 'CWConstraint' fetch_attrs, fetch_order = fetch_config(['value']) def dc_title(self): @@ -164,7 +164,7 @@ class RQLExpression(AnyEntity): - id = 'RQLExpression' + __regid__ = 'RQLExpression' fetch_attrs, fetch_order = fetch_config(['exprtype', 'mainvars', 'expression']) def dc_title(self): @@ -198,13 +198,13 @@ class CWPermission(AnyEntity): - id = 'CWPermission' + __regid__ = 'CWPermission' fetch_attrs, fetch_order = 
fetch_config(['name', 'label']) def dc_title(self): if self.label: - return '%s (%s)' % (self.req._(self.name), self.label) - return self.req._(self.name) + return '%s (%s)' % (self._cw._(self.name), self.label) + return self._cw._(self.name) def after_deletion_path(self): """return (path, parameters) which should be used as redirect diff -r 15d541321a8c -r 74c1597f8a82 entities/test/unittest_base.py --- a/entities/test/unittest_base.py Wed Jan 20 10:13:02 2010 +0100 +++ b/entities/test/unittest_base.py Wed Jan 20 10:13:45 2010 +0100 @@ -11,15 +11,14 @@ from logilab.common.decorators import clear_cache from logilab.common.interface import implements -from cubicweb.devtools.apptest import EnvBasedTC +from cubicweb.devtools.testlib import CubicWebTC from cubicweb import ValidationError from cubicweb.interfaces import IMileStone, IWorkflowable from cubicweb.entities import AnyEntity -from cubicweb.web.widgets import AutoCompletionWidget -class BaseEntityTC(EnvBasedTC): +class BaseEntityTC(CubicWebTC): def setup_database(self): self.member = self.create_user('member') @@ -30,7 +29,7 @@ def test_creator(self): self.login(u'member') - entity = self.add_entity('Bookmark', title=u"hello", path=u'project/cubicweb') + entity = self.request().create_entity('Bookmark', title=u"hello", path=u'project/cubicweb') self.commit() self.assertEquals(entity.creator.eid, self.member.eid) self.assertEquals(entity.dc_creator(), u'member') @@ -100,7 +99,7 @@ ) -class InterfaceTC(EnvBasedTC): +class InterfaceTC(CubicWebTC): def test_nonregr_subclasses_and_mixins_interfaces(self): CWUser = self.vreg['etypes'].etype_class('CWUser') @@ -120,7 +119,7 @@ self.failIf(implements(MyUser, IWorkflowable)) -class SpecializedEntityClassesTC(EnvBasedTC): +class SpecializedEntityClassesTC(CubicWebTC): def select_eclass(self, etype): # clear selector cache @@ -137,7 +136,7 @@ self.vreg._loadedmods[__name__] = {} for etype in ('Company', 'Division', 'SubDivision'): class Foo(AnyEntity): - id = etype + __regid__ = etype self.vreg.register_appobject_class(Foo) eclass = self.select_eclass('SubDivision') self.failUnless(eclass.__autogenerated__) @@ -148,7 +147,7 @@ self.assertEquals(eclass.__bases__[0].__bases__, (Foo,)) # check Division eclass is still selected for plain Division entities eclass = self.select_eclass('Division') - self.assertEquals(eclass.id, 'Division') + self.assertEquals(eclass.__regid__, 'Division') if __name__ == '__main__': unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 entities/test/unittest_wfobjs.py --- a/entities/test/unittest_wfobjs.py Wed Jan 20 10:13:02 2010 +0100 +++ b/entities/test/unittest_wfobjs.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,4 +1,4 @@ -from cubicweb.devtools.apptest import EnvBasedTC +from cubicweb.devtools.testlib import CubicWebTC from cubicweb import ValidationError def add_wf(self, etype, name=None, default=False): @@ -18,7 +18,7 @@ for ti in wfhist] -class WorkflowBuildingTC(EnvBasedTC): +class WorkflowBuildingTC(CubicWebTC): def test_wf_construction(self): wf = add_wf(self, 'Company') @@ -45,7 +45,6 @@ # gnark gnark bar = wf.add_state(u'bar') self.commit() - print '*'*80 bar.set_attributes(name=u'foo') ex = self.assertRaises(ValidationError, self.commit) self.assertEquals(ex.errors, {'name': 'workflow already have a state of that name'}) @@ -72,12 +71,12 @@ self.assertEquals(ex.errors, {'name': 'workflow already have a transition of that name'}) -class WorkflowTC(EnvBasedTC): +class WorkflowTC(CubicWebTC): def setup_database(self): rschema = self.schema['in_state'] - for x, y in 
rschema.iter_rdefs(): - self.assertEquals(rschema.rproperty(x, y, 'cardinality'), '1*') + for rdef in rschema.rdefs.values(): + self.assertEquals(rdef.cardinality, '1*') self.member = self.create_user('member') def test_workflow_base(self): @@ -127,7 +126,7 @@ wf = add_wf(self, 'CWUser') s = wf.add_state(u'foo', initial=True) self.commit() - ex = self.assertRaises(ValidationError, self.session().unsafe_execute, + ex = self.assertRaises(ValidationError, self.session.unsafe_execute, 'SET X in_state S WHERE X eid %(x)s, S eid %(s)s', {'x': self.user().eid, 's': s.eid}, 'x') self.assertEquals(ex.errors, {'in_state': "state doesn't belong to entity's workflow. " @@ -210,7 +209,7 @@ [(swfstate2, state2), (swfstate3, state3)]) self.assertEquals(swftr1.destination().eid, swfstate1.eid) # workflows built, begin test - self.group = self.add_entity('CWGroup', name=u'grp1') + self.group = self.request().create_entity('CWGroup', name=u'grp1') self.commit() self.assertEquals(self.group.current_state.eid, state1.eid) self.assertEquals(self.group.current_workflow.eid, mwf.eid) @@ -234,7 +233,7 @@ # subworkflow input transition ex = self.assertRaises(ValidationError, self.group.change_state, swfstate1, u'gadget') - self.assertEquals(ex.errors, {'to_state': "state doesn't belong to entity's current workflow"}) + self.assertEquals(ex.errors, {'to_state': "state doesn't belong to entity's workflow"}) self.rollback() # force back to state1 self.group.change_state('state1', u'gadget') @@ -295,7 +294,7 @@ twf.add_wftransition(_('close'), subwf, (released,), [(xsigned, closed), (xaborted, released)]) self.commit() - group = self.add_entity('CWGroup', name=u'grp1') + group = self.request().create_entity('CWGroup', name=u'grp1') self.commit() for trans in ('identify', 'release', 'close'): group.fire_transition(trans) @@ -320,7 +319,7 @@ twf.add_wftransition(_('release'), subwf, identified, [(xaborted, None)]) self.commit() - group = self.add_entity('CWGroup', name=u'grp1') + group = self.request().create_entity('CWGroup', name=u'grp1') self.commit() for trans, nextstate in (('identify', 'xsigning'), ('xabort', 'created'), @@ -335,15 +334,11 @@ self.assertEquals(group.state, nextstate) -class CustomWorkflowTC(EnvBasedTC): +class CustomWorkflowTC(CubicWebTC): def setup_database(self): self.member = self.create_user('member') - def tearDown(self): - super(CustomWorkflowTC, self).tearDown() - self.execute('DELETE X custom_workflow WF') - def test_custom_wf_replace_state_no_history(self): """member in inital state with no previous history, state is simply redirected when changing workflow @@ -394,7 +389,7 @@ wf = add_wf(self, 'Company') wf.add_state('asleep', initial=True) self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member.eid}) + {'wf': wf.eid, 'x': self.member.eid}, 'x') ex = self.assertRaises(ValidationError, self.commit) self.assertEquals(ex.errors, {'custom_workflow': 'workflow isn\'t a workflow for this type'}) @@ -422,8 +417,7 @@ ('asleep', 'activated', None, 'workflow changed to "default user workflow"'),]) - -class AutoTransitionTC(EnvBasedTC): +class AutoTransitionTC(CubicWebTC): def setup_database(self): self.wf = add_wf(self, 'CWUser') @@ -463,18 +457,17 @@ ('asleep', 'dead', 'sick', None),]) -from cubicweb.devtools.apptest import RepositoryBasedTC - -class WorkflowHooksTC(RepositoryBasedTC): +class WorkflowHooksTC(CubicWebTC): def setUp(self): - RepositoryBasedTC.setUp(self) + CubicWebTC.setUp(self) self.wf = self.session.user.current_workflow + 
self.session.set_pool() self.s_activated = self.wf.state_by_name('activated').eid self.s_deactivated = self.wf.state_by_name('deactivated').eid self.s_dummy = self.wf.add_state(u'dummy').eid self.wf.add_transition(u'dummy', (self.s_deactivated,), self.s_dummy) - ueid = self.create_user('stduser', commit=False) + ueid = self.create_user('stduser', commit=False).eid # test initial state is set rset = self.execute('Any N WHERE S name N, X in_state S, X eid %(x)s', {'x' : ueid}) @@ -491,13 +484,6 @@ {'wf': self.wf.eid}) self.commit() - def tearDown(self): - self.execute('DELETE X require_group G ' - 'WHERE G name "users", X transition_of WF, WF eid %(wf)s', - {'wf': self.wf.eid}) - self.commit() - RepositoryBasedTC.tearDown(self) - # XXX currently, we've to rely on hooks to set initial state, or to use unsafe_execute # def test_initial_state(self): # cnx = self.login('stduser') @@ -522,7 +508,7 @@ def test_transition_checking1(self): cnx = self.login('stduser') - user = cnx.user(self.current_session()) + user = cnx.user(self.session) ex = self.assertRaises(ValidationError, user.fire_transition, 'activate') self.assertEquals(self._cleanup_msg(ex.errors['by_transition']), @@ -531,8 +517,7 @@ def test_transition_checking2(self): cnx = self.login('stduser') - user = cnx.user(self.current_session()) - assert user.state == 'activated' + user = cnx.user(self.session) ex = self.assertRaises(ValidationError, user.fire_transition, 'dummy') self.assertEquals(self._cleanup_msg(ex.errors['by_transition']), @@ -541,7 +526,7 @@ def test_transition_checking3(self): cnx = self.login('stduser') - session = self.current_session() + session = self.session user = cnx.user(session) user.fire_transition('deactivate') cnx.commit() diff -r 15d541321a8c -r 74c1597f8a82 entities/wfobjs.py --- a/entities/wfobjs.py Wed Jan 20 10:13:02 2010 +0100 +++ b/entities/wfobjs.py Wed Jan 20 10:13:45 2010 +0100 @@ -15,12 +15,12 @@ from cubicweb.entities import AnyEntity, fetch_config from cubicweb.interfaces import IWorkflowable -from cubicweb.common.mixins import MI_REL_TRIGGERS +from cubicweb.mixins import MI_REL_TRIGGERS class WorkflowException(Exception): pass class Workflow(AnyEntity): - id = 'Workflow' + __regid__ = 'Workflow' @property def initial(self): @@ -52,7 +52,7 @@ _done = set() yield self _done.add(self.eid) - for tr in self.req.execute('Any T WHERE T is WorkflowTransition, ' + for tr in self._cw.execute('Any T WHERE T is WorkflowTransition, ' 'T transition_of WF, WF eid %(wf)s', {'wf': self.eid}).entities(): if tr.subwf.eid in _done: @@ -63,7 +63,7 @@ # state / transitions accessors ############################################ def state_by_name(self, statename): - rset = self.req.execute('Any S, SN WHERE S name SN, S name %(n)s, ' + rset = self._cw.execute('Any S, SN WHERE S name SN, S name %(n)s, ' 'S state_of WF, WF eid %(wf)s', {'n': statename, 'wf': self.eid}, 'wf') if rset: @@ -71,7 +71,7 @@ return None def state_by_eid(self, eid): - rset = self.req.execute('Any S, SN WHERE S name SN, S eid %(s)s, ' + rset = self._cw.execute('Any S, SN WHERE S name SN, S eid %(s)s, ' 'S state_of WF, WF eid %(wf)s', {'s': eid, 'wf': self.eid}, ('wf', 's')) if rset: @@ -79,7 +79,7 @@ return None def transition_by_name(self, trname): - rset = self.req.execute('Any T, TN WHERE T name TN, T name %(n)s, ' + rset = self._cw.execute('Any T, TN WHERE T name TN, T name %(n)s, ' 'T transition_of WF, WF eid %(wf)s', {'n': trname, 'wf': self.eid}, 'wf') if rset: @@ -87,7 +87,7 @@ return None def transition_by_eid(self, eid): - rset = 
self.req.execute('Any T, TN WHERE T name TN, T eid %(t)s, ' + rset = self._cw.execute('Any T, TN WHERE T name TN, T eid %(t)s, ' 'T transition_of WF, WF eid %(wf)s', {'t': eid, 'wf': self.eid}, ('wf', 't')) if rset: @@ -98,20 +98,20 @@ def add_state(self, name, initial=False, **kwargs): """add a state to this workflow""" - state = self.req.create_entity('State', name=unicode(name), **kwargs) - self.req.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s', + state = self._cw.create_entity('State', name=unicode(name), **kwargs) + self._cw.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s', {'s': state.eid, 'wf': self.eid}, ('s', 'wf')) if initial: assert not self.initial, "Initial state already defined as %s" % self.initial - self.req.execute('SET WF initial_state S ' + self._cw.execute('SET WF initial_state S ' 'WHERE S eid %(s)s, WF eid %(wf)s', {'s': state.eid, 'wf': self.eid}, ('s', 'wf')) return state def _add_transition(self, trtype, name, fromstates, requiredgroups=(), conditions=(), **kwargs): - tr = self.req.create_entity(trtype, name=unicode(name), **kwargs) - self.req.execute('SET T transition_of WF ' + tr = self._cw.create_entity(trtype, name=unicode(name), **kwargs) + self._cw.execute('SET T transition_of WF ' 'WHERE T eid %(t)s, WF eid %(wf)s', {'t': tr.eid, 'wf': self.eid}, ('t', 'wf')) assert fromstates, fromstates @@ -120,7 +120,7 @@ for state in fromstates: if hasattr(state, 'eid'): state = state.eid - self.req.execute('SET S allowed_transition T ' + self._cw.execute('SET S allowed_transition T ' 'WHERE S eid %(s)s, T eid %(t)s', {'s': state, 't': tr.eid}, ('s', 't')) tr.set_transition_permissions(requiredgroups, conditions, reset=False) @@ -134,7 +134,7 @@ if tostate is not None: if hasattr(tostate, 'eid'): tostate = tostate.eid - self.req.execute('SET T destination_state S ' + self._cw.execute('SET T destination_state S ' 'WHERE S eid %(s)s, T eid %(t)s', {'t': tr.eid, 's': tostate}, ('s', 't')) return tr @@ -146,7 +146,7 @@ requiredgroups, conditions, **kwargs) if hasattr(subworkflow, 'eid'): subworkflow = subworkflow.eid - assert self.req.execute('SET T subworkflow WF WHERE WF eid %(wf)s,T eid %(t)s', + assert self._cw.execute('SET T subworkflow WF WHERE WF eid %(wf)s,T eid %(t)s', {'t': tr.eid, 'wf': subworkflow}, ('wf', 't')) for fromstate, tostate in exitpoints: tr.add_exit_point(fromstate, tostate) @@ -173,11 +173,11 @@ provides a specific may_be_fired method to check if the relation may be fired by the logged user """ - id = 'BaseTransition' + __regid__ = 'BaseTransition' fetch_attrs, fetch_order = fetch_config(['name']) def __init__(self, *args, **kwargs): - if self.id == 'BaseTransition': + if self.__regid__ == 'BaseTransition': raise WorkflowException('should not be instantiated') super(BaseTransition, self).__init__(*args, **kwargs) @@ -195,7 +195,7 @@ `eid` is the eid of the object on which we may fire the transition """ - user = self.req.user + user = self._cw.user # check user is at least in one of the required groups if any groups = frozenset(g.name for g in self.require_group) if groups: @@ -207,7 +207,7 @@ # check one of the rql expression conditions matches if any if self.condition: for rqlexpr in self.condition: - if rqlexpr.check_expression(self.req, eid): + if rqlexpr.check_expression(self._cw, eid): return True if self.condition or groups: return False @@ -227,12 +227,12 @@ transition """ if reset: - self.req.execute('DELETE T require_group G WHERE T eid %(x)s', + self._cw.execute('DELETE T require_group G WHERE T eid %(x)s', {'x': 
self.eid}, 'x') - self.req.execute('DELETE T condition R WHERE T eid %(x)s', + self._cw.execute('DELETE T condition R WHERE T eid %(x)s', {'x': self.eid}, 'x') for gname in requiredgroups: - rset = self.req.execute('SET T require_group G ' + rset = self._cw.execute('SET T require_group G ' 'WHERE T eid %(x)s, G name %(gn)s', {'x': self.eid, 'gn': gname}, 'x') assert rset, '%s is not a known group' % gname @@ -246,15 +246,15 @@ kwargs = expr kwargs['x'] = self.eid kwargs.setdefault('mainvars', u'X') - self.req.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' + self._cw.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' 'X expression %(expr)s, X mainvars %(mainvars)s, ' - 'T condition X WHERE T eid %(x)s', kwargs, 'x') + 'T condition X WHERE T eid %(x)s',kwargs, 'x') # XXX clear caches? class Transition(BaseTransition): """customized class for Transition entities""" - id = 'Transition' + __regid__ = 'Transition' def destination(self): return self.destination_state[0] @@ -265,7 +265,7 @@ class WorkflowTransition(BaseTransition): """customized class for WorkflowTransition entities""" - id = 'WorkflowTransition' + __regid__ = 'WorkflowTransition' @property def subwf(self): @@ -278,13 +278,13 @@ if hasattr(fromstate, 'eid'): fromstate = fromstate.eid if tostate is None: - self.req.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' + self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' 'X subworkflow_state FS WHERE T eid %(t)s, FS eid %(fs)s', {'t': self.eid, 'fs': fromstate}, ('t', 'fs')) else: if hasattr(tostate, 'eid'): tostate = tostate.eid - self.req.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' + self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' 'X subworkflow_state FS, X destination_state TS ' 'WHERE T eid %(t)s, FS eid %(fs)s, TS eid %(ts)s', {'t': self.eid, 'fs': fromstate, 'ts': tostate}, @@ -301,7 +301,7 @@ if tostateeid is None: # go back to state from which we've entered the subworkflow return entity.subworkflow_input_trinfo().previous_state - return self.req.entity_from_eid(tostateeid) + return self._cw.entity_from_eid(tostateeid) @cached def exit_points(self): @@ -317,7 +317,7 @@ class SubWorkflowExitPoint(AnyEntity): """customized class for SubWorkflowExitPoint entities""" - id = 'SubWorkflowExitPoint' + __regid__ = 'SubWorkflowExitPoint' @property def subwf_state(self): @@ -333,7 +333,7 @@ class State(AnyEntity): """customized class for State entities""" - id = 'State' + __regid__ = 'State' fetch_attrs, fetch_order = fetch_config(['name']) rest_attr = 'eid' @@ -349,7 +349,7 @@ class TrInfo(AnyEntity): """customized class for Transition information entities """ - id = 'TrInfo' + __regid__ = 'TrInfo' fetch_attrs, fetch_order = fetch_config(['creation_date', 'comment'], pclass=None) # don't want modification_date @property @@ -410,7 +410,7 @@ """return current state name translated to context's language""" state = self.current_state if state: - return self.req._(state.name) + return self._cw._(state.name) return u'' @property @@ -430,11 +430,12 @@ @cached def cwetype_workflow(self): """return the default workflow for entities of this type""" - wfrset = self.req.execute('Any WF WHERE ET default_workflow WF, ' - 'ET name %(et)s', {'et': self.id}) + # XXX CWEType method + wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, ' + 'ET name %(et)s', {'et': self.__regid__}) if wfrset: return wfrset.get_entity(0, 0) - self.warning("can't find any workflow for %s", self.id) + 
self.warning("can't find any workflow for %s", self.__regid__) return None def possible_transitions(self, type='normal'): @@ -444,7 +445,7 @@ """ if self.current_state is None or self.current_workflow is None: return - rset = self.req.execute( + rset = self._cw.execute( 'Any T,TT, TN WHERE S allowed_transition T, S eid %(x)s, ' 'T type TT, T type %(type)s, ' 'T name TN, T transition_of WF, WF eid %(wfeid)s', @@ -462,10 +463,10 @@ kwargs['comment_format'] = commentformat kwargs['wf_info_for'] = self if treid is not None: - kwargs['by_transition'] = self.req.entity_from_eid(treid) + kwargs['by_transition'] = self._cw.entity_from_eid(treid) if tseid is not None: - kwargs['to_state'] = self.req.entity_from_eid(tseid) - return self.req.create_entity('TrInfo', **kwargs) + kwargs['to_state'] = self._cw.entity_from_eid(tseid) + return self._cw.create_entity('TrInfo', **kwargs) def fire_transition(self, tr, comment=None, commentformat=None): """change the entity's state by firing transition of the given name in @@ -474,7 +475,8 @@ assert self.current_workflow if isinstance(tr, basestring): _tr = self.current_workflow.transition_by_name(tr) - assert _tr is not None, 'not a %s transition: %s' % (self.id, tr) + assert _tr is not None, 'not a %s transition: %s' % ( + self.__regid__, tr) tr = _tr return self._add_trinfo(comment, commentformat, tr.eid) @@ -494,7 +496,7 @@ else: state = self.current_workflow.state_by_name(statename) if state is None: - raise WorkflowException('not a %s state: %s' % (self.id, + raise WorkflowException('not a %s state: %s' % (self.__regid__, statename)) stateeid = state.eid # XXX try to find matching transition? @@ -532,7 +534,7 @@ super(WorkflowableMixIn, self).clear_all_caches() clear_cache(self, 'cwetype_workflow') - @deprecated('get transition from current workflow and use its may_be_fired method') + @deprecated('[3.5] get transition from current workflow and use its may_be_fired method') def can_pass_transition(self, trname): """return the Transition instance if the current user can fire the transition with the given name, else None @@ -542,8 +544,8 @@ return tr @property - @deprecated('use printable_state') + @deprecated('[3.5] use printable_state') def displayable_state(self): - return self.req._(self.state) + return self._cw._(self.state) MI_REL_TRIGGERS[('in_state', 'subject')] = WorkflowableMixIn diff -r 15d541321a8c -r 74c1597f8a82 entity.py --- a/entity.py Wed Jan 20 10:13:02 2010 +0100 +++ b/entity.py Wed Jan 20 10:13:45 2010 +0100 @@ -22,119 +22,24 @@ from cubicweb.rset import ResultSet from cubicweb.selectors import yes from cubicweb.appobject import AppObject +from cubicweb.schema import RQLVocabularyConstraint, RQLConstraint from cubicweb.rqlrewrite import RQLRewriter -from cubicweb.schema import RQLVocabularyConstraint, RQLConstraint, bw_normalize_etype -from cubicweb.common.uilib import printable_value, soup2xhtml -from cubicweb.common.mixins import MI_REL_TRIGGERS -from cubicweb.common.mttransforms import ENGINE +from cubicweb.uilib import printable_value, soup2xhtml +from cubicweb.mixins import MI_REL_TRIGGERS +from cubicweb.mttransforms import ENGINE _marker = object() def greater_card(rschema, subjtypes, objtypes, index): for subjtype in subjtypes: for objtype in objtypes: - card = rschema.rproperty(subjtype, objtype, 'cardinality')[index] + card = rschema.rdef(subjtype, objtype).cardinality[index] if card in '+*': return card return '1' -_MODE_TAGS = set(('link', 'create')) -_CATEGORY_TAGS = set(('primary', 'secondary', 'generic', 'generated')) # , 
'metadata')) - -try: - from cubicweb.web import formwidgets, uicfg - - def _dispatch_rtags(tags, rtype, role, stype, otype): - for tag in tags: - if tag in _MODE_TAGS: - uicfg.actionbox_appearsin_addmenu.tag_relation( - (stype, rtype, otype, role), tag == 'create') - elif tag in _CATEGORY_TAGS: - uicfg.autoform_section.tag_relation((stype, rtype, otype, role), - tag) - elif tag == 'inlineview': - uicfg.autoform_is_inlined.tag_relation((stype, rtype, otype, role), True) - else: - raise ValueError(tag) - -except ImportError: - - _dispatch_rtags = None - -def _get_etype(bases, classdict): - try: - return classdict['id'] - except KeyError: - for base in bases: - etype = getattr(base, 'id', None) - if etype and etype != 'Any': - return etype - -def _get_defs(attr, name, bases, classdict): - try: - yield name, classdict.pop(attr) - except KeyError: - for base in bases: - try: - value = getattr(base, attr) - delattr(base, attr) - yield base.__name__, value - except AttributeError: - continue - - -class _metaentity(type): - """this metaclass sets the relation tags on the entity class - and deals with the `widgets` attribute - """ - def __new__(mcs, name, bases, classdict): - # collect baseclass' rtags - etype = _get_etype(bases, classdict) - if etype and _dispatch_rtags is not None: - for name, rtags in _get_defs('__rtags__', name, bases, classdict): - warn('%s: __rtags__ is deprecated' % name, DeprecationWarning) - for relation, tags in rtags.iteritems(): - # tags must become an iterable - if isinstance(tags, basestring): - tags = (tags,) - # relation must become a 3-uple (rtype, targettype, role) - if isinstance(relation, basestring): - _dispatch_rtags(tags, relation, 'subject', etype, '*') - _dispatch_rtags(tags, relation, 'object', '*', etype) - elif len(relation) == 1: # useful ? - _dispatch_rtags(tags, relation[0], 'subject', etype, '*') - _dispatch_rtags(tags, relation[0], 'object', '*', etype) - elif len(relation) == 2: - rtype, ttype = relation - ttype = bw_normalize_etype(ttype) # XXX bw compat - _dispatch_rtags(tags, rtype, 'subject', etype, ttype) - _dispatch_rtags(tags, rtype, 'object', ttype, etype) - elif len(relation) == 3: - rtype, ttype, role = relation - ttype = bw_normalize_etype(ttype) - if role == 'subject': - _dispatch_rtags(tags, rtype, 'subject', etype, ttype) - else: - _dispatch_rtags(tags, rtype, 'object', ttype, etype) - else: - raise ValueError('bad rtag definition (%r)' % (relation,)) - for name, widgets in _get_defs('widgets', name, bases, classdict): - warn('%s: widgets is deprecated' % name, DeprecationWarning) - for rtype, wdgname in widgets.iteritems(): - if wdgname in ('URLWidget', 'EmbededURLWidget', 'RawDynamicComboBoxWidget'): - warn('%s widget is deprecated' % wdgname, DeprecationWarning) - continue - if wdgname == 'StringWidget': - wdgname = 'TextInput' - widget = getattr(formwidgets, wdgname) - assert hasattr(widget, 'render') - uicfg.autoform_field_kwargs.tag_subject_of( - (etype, rtype, '*'), {'widget': widget}) - return super(_metaentity, mcs).__new__(mcs, name, bases, classdict) - - class Entity(AppObject, dict): """an entity instance has e_schema automagically set on the class and instances has access to their issuing cursor. @@ -158,28 +63,24 @@ as composite relations or relations that have '?1' as object cardinality are always skipped. 
""" - __metaclass__ = _metaentity __registry__ = 'etypes' __select__ = yes() # class attributes that must be set in class definition - id = None rest_attr = None fetch_attrs = None skip_copy_for = ('in_state',) # class attributes set automatically at registration time e_schema = None - MODE_TAGS = set(('link', 'create')) - CATEGORY_TAGS = set(('primary', 'secondary', 'generic', 'generated')) # , 'metadata')) @classmethod - def __initialize__(cls): + def __initialize__(cls, schema): """initialize a specific entity class by adding descriptors to access entity type's attributes and relations """ - etype = cls.id + etype = cls.__regid__ assert etype != 'Any', etype - cls.e_schema = eschema = cls.schema.eschema(etype) + cls.e_schema = eschema = schema.eschema(etype) for rschema, _ in eschema.attribute_definitions(): if rschema.type == 'eid': continue @@ -218,7 +119,7 @@ """return a rql to fetch all entities of the class type""" restrictions = restriction or [] if settype: - restrictions.append('%s is %s' % (mainvar, cls.id)) + restrictions.append('%s is %s' % (mainvar, cls.__regid__)) if fetchattrs is None: fetchattrs = cls.fetch_attrs selection = [mainvar] @@ -251,9 +152,10 @@ rschema = eschema.subjrels[attr] except KeyError: cls.warning('skipping fetch_attr %s defined in %s (not found in schema)', - attr, cls.id) + attr, cls.__regid__) continue - if not user.matching_groups(rschema.get_groups('read')): + rdef = eschema.rdef(attr) + if not user.matching_groups(rdef.get_groups('read')): continue var = varmaker.next() selection.append(var) @@ -262,7 +164,7 @@ if not rschema.final: # XXX this does not handle several destination types desttype = rschema.objects(eschema.type)[0] - card = rschema.rproperty(eschema, desttype, 'cardinality')[0] + card = rdef.cardinality[0] if card not in '?1': cls.warning('bad relation %s specified in fetch attrs for %s', attr, cls) @@ -274,7 +176,8 @@ # that case the relation may still be missing. As we miss this # later information here, systematically add it. restrictions[-1] += '?' 
- destcls = cls.vreg['etypes'].etype_class(desttype) + # XXX user._cw.vreg iiiirk + destcls = user._cw.vreg['etypes'].etype_class(desttype) destcls._fetch_restrictions(var, varmaker, destcls.fetch_attrs, selection, orderby, restrictions, user, ordermethod, visited=visited) @@ -285,14 +188,6 @@ @classmethod @cached - def parent_classes(cls): - parents = [cls.vreg['etypes'].etype_class(e.type) - for e in cls.e_schema.ancestors()] - parents.append(cls.vreg['etypes'].etype_class('Any')) - return parents - - @classmethod - @cached def _rest_attr_info(cls): mainattr, needcheck = 'eid', True if cls.rest_attr: @@ -309,7 +204,7 @@ return mainattr, needcheck def __init__(self, req, rset=None, row=None, col=0): - AppObject.__init__(self, req, rset, row, col) + AppObject.__init__(self, req, rset=rset, row=row, col=col) dict.__init__(self) self._related_cache = {} if rset is not None: @@ -363,24 +258,26 @@ @cached def metainformation(self): - res = dict(zip(('type', 'source', 'extid'), self.req.describe(self.eid))) - res['source'] = self.req.source_defs()[res['source']] + res = dict(zip(('type', 'source', 'extid'), self._cw.describe(self.eid))) + res['source'] = self._cw.source_defs()[res['source']] return res def clear_local_perm_cache(self, action): for rqlexpr in self.e_schema.get_rqlexprs(action): - self.req.local_perm_cache.pop((rqlexpr.eid, (('x', self.eid),)), None) + self._cw.local_perm_cache.pop((rqlexpr.eid, (('x', self.eid),)), None) def check_perm(self, action): - self.e_schema.check_perm(self.req, action, self.eid) + self.e_schema.check_perm(self._cw, action, eid=self.eid) def has_perm(self, action): - return self.e_schema.has_perm(self.req, action, self.eid) + return self.e_schema.has_perm(self._cw, action, eid=self.eid) def view(self, vid, __registry='views', **kwargs): """shortcut to apply a view on this entity""" - return self.vreg[__registry].render(vid, self.req, rset=self.rset, - row=self.row, col=self.col, **kwargs) + view = self._cw.vreg[__registry].select(vid, self._cw, rset=self.cw_rset, + row=self.cw_row, col=self.cw_col, + **kwargs) + return view.render(row=self.cw_row, col=self.cw_col, **kwargs) def absolute_url(self, *args, **kwargs): """return an absolute url to view this entity""" @@ -394,18 +291,18 @@ # in linksearch mode, we don't want external urls else selecting # the object for use in the relation is tricky # XXX search_state is web specific - if getattr(self.req, 'search_state', ('normal',))[0] == 'normal': + if getattr(self._cw, 'search_state', ('normal',))[0] == 'normal': kwargs['base_url'] = self.metainformation()['source'].get('base-url') if method in (None, 'view'): try: kwargs['_restpath'] = self.rest_path(kwargs.get('base_url')) except TypeError: - warn('%s: rest_path() now take use_ext_eid argument, ' - 'please update' % self.id, DeprecationWarning) + warn('[3.4] %s: rest_path() now take use_ext_eid argument, ' + 'please update' % self.__regid__, DeprecationWarning) kwargs['_restpath'] = self.rest_path() else: kwargs['rql'] = 'Any X WHERE X eid %s' % self.eid - return self.build_url(method, **kwargs) + return self._cw.build_url(method, **kwargs) def rest_path(self, use_ext_eid=False): """returns a REST-like (relative) path for this entity""" @@ -421,7 +318,7 @@ # make sure url is not ambiguous rql = 'Any COUNT(X) WHERE X is %s, X %s %%(value)s' % ( etype, mainattr) - nbresults = self.req.execute(rql, {'value' : value})[0][0] + nbresults = self._cw.execute(rql, {'value' : value})[0][0] if nbresults != 1: # ambiguity? 
mainattr = 'eid' path += '/eid' @@ -430,13 +327,13 @@ value = self.metainformation()['extid'] else: value = self.eid - return '%s/%s' % (path, self.req.url_quote(value)) + return '%s/%s' % (path, self._cw.url_quote(value)) def attr_metadata(self, attr, metadata): """return a metadata for an attribute (None if unspecified)""" value = getattr(self, '%s_%s' % (attr, metadata), None) if value is None and metadata == 'encoding': - value = self.vreg.property_value('ui.encoding') + value = self._cw.vreg.property_value('ui.encoding') return value def printable_value(self, attr, value=_marker, attrtype=None, @@ -453,16 +350,16 @@ return u'' if attrtype is None: attrtype = self.e_schema.destination(attr) - props = self.e_schema.rproperties(attr) + props = self.e_schema.rdef(attr) if attrtype == 'String': # internalinalized *and* formatted string such as schema # description... - if props.get('internationalizable'): - value = self.req._(value) + if props.internationalizable: + value = self._cw._(value) attrformat = self.attr_metadata(attr, 'format') if attrformat: return self.mtc_transform(value, attrformat, format, - self.req.encoding) + self._cw.encoding) elif attrtype == 'Bytes': attrformat = self.attr_metadata(attr, 'format') if attrformat: @@ -470,7 +367,7 @@ return self.mtc_transform(value.getvalue(), attrformat, format, encoding) return u'' - value = printable_value(self.req, attrtype, value, props, + value = printable_value(self._cw, attrtype, value, props, displaytime=displaytime) if format == 'text/html': value = xml_escape(value) @@ -481,7 +378,7 @@ trdata = TransformData(data, format, encoding, appobject=self) data = _engine.convert(trdata, target_format).decode() if format == 'text/html': - data = soup2xhtml(data, self.req.encoding) + data = soup2xhtml(data, self._cw.encoding) return data # entity cloning ########################################################## @@ -495,7 +392,7 @@ Overrides this if you want another behaviour """ assert self.has_eid() - execute = self.req.execute + execute = self._cw.execute for rschema in self.e_schema.subject_relations(): if rschema.final or rschema.meta: continue @@ -505,11 +402,12 @@ if rschema.type in self.skip_copy_for: continue # skip composite relation - if self.e_schema.subjrproperty(rschema, 'composite'): + rdef = self.e_schema.rdef(rschema) + if rdef.composite: continue # skip relation with card in ?1 else we either change the copied # object (inlined relation) or inserting some inconsistency - if self.e_schema.subjrproperty(rschema, 'cardinality')[1] in '?1': + if rdef.cardinality[1] in '?1': continue rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % ( rschema.type, rschema.type) @@ -519,14 +417,15 @@ if rschema.meta: continue # skip already defined relations - if getattr(self, 'reverse_%s' % rschema.type): + if self.related(rschema.type, 'object'): continue + rdef = self.e_schema.rdef(rschema, 'object') # skip composite relation - if self.e_schema.objrproperty(rschema, 'composite'): + if rdef.composite: continue # skip relation with card in ?1 else we either change the copied # object (inlined relation) or inserting some inconsistency - if self.e_schema.objrproperty(rschema, 'cardinality')[0] in '?1': + if rdef.cardinality[0] in '?1': continue rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % ( rschema.type, rschema.type) @@ -539,23 +438,24 @@ def as_rset(self): """returns a resultset containing `self` information""" rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s', - {'x': self.eid}, [(self.id,)]) - return 
self.req.decorate_rset(rset) + {'x': self.eid}, [(self.__regid__,)]) + return self._cw.decorate_rset(rset) def to_complete_relations(self): """by default complete final relations to when calling .complete()""" for rschema in self.e_schema.subject_relations(): if rschema.final: continue - if len(rschema.objects(self.e_schema)) > 1: + targets = rschema.objects(self.e_schema) + if len(targets) > 1: # ambigous relations, the querier doesn't handle # outer join correctly in this case continue if rschema.inlined: - matching_groups = self.req.user.matching_groups - if matching_groups(rschema.get_groups('read')) and \ - all(matching_groups(es.get_groups('read')) - for es in rschema.objects(self.e_schema)): + matching_groups = self._cw.user.matching_groups + rdef = rschema.rdef(self.e_schema, targets[0]) + if matching_groups(rdef.get_groups('read')) and \ + all(matching_groups(e.get_groups('read')) for e in targets): yield rschema, 'subject' def to_complete_attributes(self, skip_bytes=True): @@ -567,7 +467,8 @@ if attr == 'eid': continue # password retreival is blocked at the repository server level - if not self.req.user.matching_groups(rschema.get_groups('read')) \ + rdef = rschema.rdef(self.e_schema, attrschema) + if not self._cw.user.matching_groups(rdef.get_groups('read')) \ or attrschema.type == 'Password': self[attr] = None continue @@ -603,31 +504,28 @@ if self.relation_cached(rtype, role): continue var = varmaker.next() + targettype = rschema.targets(self.e_schema, role)[0] + rdef = rschema.role_rdef(self.e_schema, targettype, role) + card = rdef.role_cardinality(role) + assert card in '1?', '%s %s %s %s' % (self.e_schema, rtype, + role, card) if role == 'subject': - targettype = rschema.objects(self.e_schema)[0] - card = rschema.rproperty(self.e_schema, targettype, - 'cardinality')[0] if card == '1': rql.append('%s %s %s' % (V, rtype, var)) - else: # '?" + else: rql.append('%s %s %s?' % (V, rtype, var)) else: - targettype = rschema.subjects(self.e_schema)[1] - card = rschema.rproperty(self.e_schema, targettype, - 'cardinality')[1] if card == '1': rql.append('%s %s %s' % (var, rtype, V)) - else: # '?" + else: rql.append('%s? %s %s' % (var, rtype, V)) - assert card in '1?', '%s %s %s %s' % (self.e_schema, rtype, - role, card) selected.append(((rtype, role), var)) if selected: # select V, we need it as the left most selected variable # if some outer join are included to fetch inlined relations rql = 'Any %s,%s %s' % (V, ','.join(var for attr, var in selected), ','.join(rql)) - execute = getattr(self.req, 'unsafe_execute', self.req.execute) + execute = getattr(self._cw, 'unsafe_execute', self._cw.execute) rset = execute(rql, {'x': self.eid}, 'x', build_descr=False)[0] # handle attributes for i in xrange(1, lastattr): @@ -638,9 +536,9 @@ value = rset[i] if value is None: rrset = ResultSet([], rql, {'x': self.eid}) - self.req.decorate_rset(rrset) + self._cw.decorate_rset(rrset) else: - rrset = self.req.eid_rset(value) + rrset = self._cw.eid_rset(value) self.set_related_cache(rtype, role, rrset) def get_value(self, name): @@ -658,7 +556,7 @@ rql = "Any A WHERE X eid %%(x)s, X %s A" % name # XXX should we really use unsafe_execute here? 
I think so (syt), # see #344874 - execute = getattr(self.req, 'unsafe_execute', self.req.execute) + execute = getattr(self._cw, 'unsafe_execute', self._cw.execute) try: rset = execute(rql, {'x': self.eid}, 'x') except Unauthorized: @@ -673,7 +571,7 @@ name, self.eid) if self.e_schema.destination(name) == 'String': # XXX (syt) imo emtpy string is better - self[name] = value = self.req._('unaccessible') + self[name] = value = self._cw._('unaccessible') else: self[name] = value = None return value @@ -693,13 +591,13 @@ rql = self.related_rql(rtype, role) # XXX should we really use unsafe_execute here? I think so (syt), # see #344874 - execute = getattr(self.req, 'unsafe_execute', self.req.execute) + execute = getattr(self._cw, 'unsafe_execute', self._cw.execute) rset = execute(rql, {'x': self.eid}, 'x') self.set_related_cache(rtype, role, rset) return self.related(rtype, role, limit, entities) def related_rql(self, rtype, role='subject', targettypes=None): - rschema = self.schema[rtype] + rschema = self._cw.vreg.schema[rtype] if role == 'subject': restriction = 'E eid %%(x)s, E %s X' % rtype if targettypes is None: @@ -717,14 +615,14 @@ if len(targettypes) > 1: fetchattrs_list = [] for ttype in targettypes: - etypecls = self.vreg['etypes'].etype_class(ttype) + etypecls = self._cw.vreg['etypes'].etype_class(ttype) fetchattrs_list.append(set(etypecls.fetch_attrs)) fetchattrs = reduce(set.intersection, fetchattrs_list) - rql = etypecls.fetch_rql(self.req.user, [restriction], fetchattrs, + rql = etypecls.fetch_rql(self._cw.user, [restriction], fetchattrs, settype=False) else: - etypecls = self.vreg['etypes'].etype_class(targettypes[0]) - rql = etypecls.fetch_rql(self.req.user, [restriction], settype=False) + etypecls = self._cw.vreg['etypes'].etype_class(targettypes[0]) + rql = etypecls.fetch_rql(self._cw.user, [restriction], settype=False) # optimisation: remove ORDERBY if cardinality is 1 or ? 
(though # greater_card return 1 for those both cases) if card == '1': @@ -748,7 +646,7 @@ """ ordermethod = ordermethod or 'fetch_unrelated_order' if isinstance(rtype, basestring): - rtype = self.schema.rschema(rtype) + rtype = self._cw.vreg.schema.rschema(rtype) if role == 'subject': evar, searchedvar = 'S', 'O' subjtype, objtype = self.e_schema, targettype @@ -766,29 +664,29 @@ restriction = [] args = {} securitycheck_args = {} - insertsecurity = (rtype.has_local_role('add') and not - rtype.has_perm(self.req, 'add', **securitycheck_args)) - constraints = rtype.rproperty(subjtype, objtype, 'constraints') + rdef = rtype.role_rdef(self.e_schema, targettype, role) + insertsecurity = (rdef.has_local_role('add') and not + rdef.has_perm(self._cw, 'add', **securitycheck_args)) # XXX consider constraint.mainvars to check if constraint apply if vocabconstraints: # RQLConstraint is a subclass for RQLVocabularyConstraint, so they # will be included as well - restriction += [cstr.restriction for cstr in constraints + restriction += [cstr.restriction for cstr in rdef.constraints if isinstance(cstr, RQLVocabularyConstraint)] else: - restriction += [cstr.restriction for cstr in constraints + restriction += [cstr.restriction for cstr in rdef.constraints if isinstance(cstr, RQLConstraint)] - etypecls = self.vreg['etypes'].etype_class(targettype) - rql = etypecls.fetch_rql(self.req.user, restriction, + etypecls = self._cw.vreg['etypes'].etype_class(targettype) + rql = etypecls.fetch_rql(self._cw.user, restriction, mainvar=searchedvar, ordermethod=ordermethod) # ensure we have an order defined if not ' ORDERBY ' in rql: before, after = rql.split(' WHERE ', 1) rql = '%s ORDERBY %s WHERE %s' % (before, searchedvar, after) if insertsecurity: - rqlexprs = rtype.get_rqlexprs('add') - rewriter = RQLRewriter(self.req) - rqlst = self.req.vreg.parse(self.req, rql, args) + rqlexprs = rdef.get_rqlexprs('add') + rewriter = RQLRewriter(self._cw) + rqlst = self._cw.vreg.parse(self._cw, rql, args) if not self.has_eid(): existant = searchedvar else: @@ -807,11 +705,11 @@ try: rql, args = self.unrelated_rql(rtype, targettype, role, ordermethod) except Unauthorized: - return self.req.empty_rset() + return self._cw.empty_rset() if limit is not None: before, after = rql.split(' WHERE ', 1) rql = '%s LIMIT %s WHERE %s' % (before, limit, after) - return self.req.execute(rql, args, tuple(args)) + return self._cw.execute(rql, args, tuple(args)) # relations cache handling ################################################ @@ -835,15 +733,13 @@ def set_related_cache(self, rtype, role, rset, col=0): """set cached values for the given relation""" if rset: - related = tuple(rset.entities(col)) - rschema = self.schema.rschema(rtype) + related = list(rset.entities(col)) + rschema = self._cw.vreg.schema.rschema(rtype) if role == 'subject': - rcard = rschema.rproperty(self.e_schema, related[0].e_schema, - 'cardinality')[1] + rcard = rschema.rdef(self.e_schema, related[0].e_schema).cardinality[1] target = 'object' else: - rcard = rschema.rproperty(related[0].e_schema, self.e_schema, - 'cardinality')[0] + rcard = rschema.rdef(related[0].e_schema, self.e_schema).cardinality[0] target = 'subject' if rcard in '?1': for rentity in related: @@ -885,10 +781,10 @@ # and now update the database kwargs['x'] = self.eid if _cw_unsafe: - self.req.unsafe_execute( + self._cw.unsafe_execute( 'SET %s WHERE X eid %%(x)s' % ','.join(relations), kwargs, 'x') else: - self.req.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations), + self._cw.execute('SET %s 
WHERE X eid %%(x)s' % ','.join(relations), kwargs, 'x') def set_relations(self, _cw_unsafe=False, **kwargs): @@ -899,9 +795,9 @@ relations of the given type from or to this object should be deleted). """ if _cw_unsafe: - execute = self.req.unsafe_execute + execute = self._cw.unsafe_execute else: - execute = self.req.execute + execute = self._cw.execute # XXX update cache for attr, values in kwargs.iteritems(): if attr.startswith('reverse_'): @@ -920,7 +816,7 @@ def delete(self): assert self.has_eid(), self.eid - self.req.execute('DELETE %s X WHERE X eid %%(x)s' % self.e_schema, + self._cw.execute('DELETE %s X WHERE X eid %%(x)s' % self.e_schema, {'x': self.eid}) # server side utilities ################################################### @@ -939,10 +835,10 @@ """ # necessary since eid is handled specifically and yams require it to be # in the dictionary - if self.req is None: + if self._cw is None: _ = unicode else: - _ = self.req._ + _ = self._cw._ self.e_schema.check(self, creation=creation, _=_) def fti_containers(self, _done=None): @@ -979,7 +875,7 @@ """ from indexer.query_objects import tokenize # take care to cases where we're modyfying the schema - pending = self.req.transaction_data.setdefault('pendingrdefs', set()) + pending = self._cw.transaction_data.setdefault('pendingrdefs', set()) words = [] for rschema in self.e_schema.indexable_attributes(): if (self.e_schema, rschema) in pending: @@ -1004,20 +900,6 @@ words += entity.get_words() return words - @deprecated('[3.2] see new form api') - def vocabulary(self, rtype, role='subject', limit=None): - """vocabulary functions must return a list of couples - (label, eid) that will typically be used to fill the - edition view's combobox. - - If `eid` is None in one of these couples, it should be - interpreted as a separator in case vocabulary results are grouped - """ - from logilab.common.testlib import mock_object - form = self.vreg.select('forms', 'edition', self.req, entity=self) - field = mock_object(name=rtype, role=role) - return form.form_field_vocabulary(field, limit) - # attribute and relation descriptors ########################################## diff -r 15d541321a8c -r 74c1597f8a82 etwist/server.py --- a/etwist/server.py Wed Jan 20 10:13:02 2010 +0100 +++ b/etwist/server.py Wed Jan 20 10:13:45 2010 +0100 @@ -136,7 +136,7 @@ start_task(interval, self.appli.session_handler.clean_sessions) def set_url_rewriter(self): - self.url_rewriter = self.appli.vreg['components'].select_object('urlrewriter') + self.url_rewriter = self.appli.vreg['components'].select_or_none('urlrewriter') def shutdown_event(self): """callback fired when the server is shutting down to properly diff -r 15d541321a8c -r 74c1597f8a82 etwist/test/unittest_server.py --- a/etwist/test/unittest_server.py Wed Jan 20 10:13:02 2010 +0100 +++ b/etwist/test/unittest_server.py Wed Jan 20 10:13:45 2010 +0100 @@ -5,11 +5,11 @@ :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ -from cubicweb.devtools.apptest import EnvBasedTC +from cubicweb.devtools.testlib import CubicWebTC from cubicweb.etwist.server import host_prefixed_baseurl -class HostPrefixedBaseURLTC(EnvBasedTC): +class HostPrefixedBaseURLTC(CubicWebTC): def _check(self, baseurl, host, waited): self.assertEquals(host_prefixed_baseurl(baseurl, host), waited, diff -r 15d541321a8c -r 74c1597f8a82 etwist/twctl.py --- a/etwist/twctl.py Wed Jan 20 10:13:02 2010 +0100 +++ b/etwist/twctl.py Wed Jan 20 10:13:45 2010 +0100 @@ -8,7 
+8,6 @@ import sys -from cubicweb import underline_title from cubicweb.toolsutils import CommandHandler from cubicweb.web.webctl import WebCreateHandler diff -r 15d541321a8c -r 74c1597f8a82 ext/rest.py --- a/ext/rest.py Wed Jan 20 10:13:02 2010 +0100 +++ b/ext/rest.py Wed Jan 20 10:13:45 2010 +0100 @@ -70,10 +70,10 @@ # Base URL mainly used by inliner.pep_reference; so this is correct: context = inliner.document.settings.context try: - refedentity = context.req.entity_from_eid(eid_num) + refedentity = context._cw.entity_from_eid(eid_num) except UnknownEid: ref = '#' - rest += u' ' + context.req._('(UNEXISTANT EID)') + rest += u' ' + context._cw._('(UNEXISTANT EID)') else: ref = refedentity.absolute_url() set_classes(options) @@ -206,7 +206,7 @@ :return: the data formatted as HTML or the original data if an error occured """ - req = context.req + req = context._cw if isinstance(data, unicode): encoding = 'unicode' # remove unprintable characters unauthorized in xml diff -r 15d541321a8c -r 74c1597f8a82 ext/test/unittest_rest.py --- a/ext/test/unittest_rest.py Wed Jan 20 10:13:02 2010 +0100 +++ b/ext/test/unittest_rest.py Wed Jan 20 10:13:45 2010 +0100 @@ -6,11 +6,11 @@ :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ from logilab.common.testlib import unittest_main -from cubicweb.devtools.apptest import EnvBasedTC +from cubicweb.devtools.testlib import CubicWebTC from cubicweb.ext.rest import rest_publish -class RestTC(EnvBasedTC): +class RestTC(CubicWebTC): def context(self): return self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) diff -r 15d541321a8c -r 74c1597f8a82 goa/__init__.py --- a/goa/__init__.py Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/__init__.py Wed Jan 20 10:13:45 2010 +0100 @@ -44,7 +44,7 @@ def rql_for_eid(eid): return 'Any X WHERE X eid "%s"' % eid - from cubicweb.common import uilib + from cubicweb import uilib uilib.rql_for_eid = rql_for_eid def typed_eid(eid): diff -r 15d541321a8c -r 74c1597f8a82 goa/appobjects/components.py --- a/goa/appobjects/components.py Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/appobjects/components.py Wed Jan 20 10:13:45 2010 +0100 @@ -12,7 +12,7 @@ from cubicweb import typed_eid from cubicweb.selectors import one_line_rset, match_search_state, accept from cubicweb.schema import display_name -from cubicweb.common.view import StartupView, EntityView +from cubicweb.view import StartupView, EntityView from cubicweb.web import Redirect from cubicweb.web.views import vid_from_rset @@ -28,7 +28,7 @@ __select__ = one_line_rset() & match_search_state('linksearch') & accept def cell_call(self, row, col): - entity = self.entity(0, 0) + entity = self.rset.get_entity(0, 0) role, eid, rtype, etype = self.req.search_state[1] assert entity.eid == typed_eid(eid) rset = entity.unrelated(rtype, etype, role, ordermethod='fetch_order') diff -r 15d541321a8c -r 74c1597f8a82 goa/appobjects/dbmgmt.py --- a/goa/appobjects/dbmgmt.py Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/appobjects/dbmgmt.py Wed Jan 20 10:13:45 2010 +0100 @@ -15,7 +15,7 @@ from logilab.mtconverter import xml_escape from cubicweb.selectors import none_rset, match_user_groups -from cubicweb.common.view import StartupView +from cubicweb.view import StartupView from cubicweb.web import Redirect from cubicweb.goa.dbinit import fix_entities, init_persistent_schema, insert_versions diff -r 15d541321a8c -r 74c1597f8a82 goa/appobjects/sessions.py --- a/goa/appobjects/sessions.py Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/appobjects/sessions.py Wed Jan 20 10:13:45 2010 
+0100 @@ -57,7 +57,7 @@ clear_cache(req, 'cursor') cnxprops = ConnectionProperties(self.vreg.config.repo_method, close=False, log=False) - cnx = repo_connect(self._repo, login, password, cnxprops=cnxprops) + cnx = repo_connect(self._repo, login, password=password, cnxprops=cnxprops) self._init_cnx(cnx, login, password) # associate the connection to the current request req.set_connection(cnx) @@ -73,9 +73,9 @@ class GAEPersistentSessionManager(AbstractSessionManager): """manage session data associated to a session identifier""" - def __init__(self, *args, **kwargs): - super(GAEPersistentSessionManager, self).__init__(*args, **kwargs) - self._repo = self.config.repository(vreg=self.vreg) + def __init__(self, vreg, *args, **kwargs): + super(GAEPersistentSessionManager, self).__init__(vreg, *args, **kwargs) + self._repo = self.config.repository(vreg=vreg) def get_session(self, req, sessionid): """return existing session for the given session identifier""" @@ -251,7 +251,7 @@ set_log_methods(ConnectionProxy, logging.getLogger('cubicweb.web.goa.session')) -from cubicweb.common.view import StartupView +from cubicweb.view import StartupView from cubicweb.web import application class SessionsCleaner(StartupView): diff -r 15d541321a8c -r 74c1597f8a82 goa/db.py --- a/goa/db.py Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/db.py Wed Jan 20 10:13:45 2010 +0100 @@ -35,7 +35,8 @@ from logilab.common.decorators import cached, iclassmethod -from cubicweb import RequestSessionMixIn, Binary, entities +from cubicweb import Binary, entities +from cubicweb.req import RequestSessionBase from cubicweb.rset import ResultSet from cubicweb.entity import metaentity from cubicweb.server.utils import crypt_password @@ -92,7 +93,7 @@ def needrequest(wrapped): def wrapper(cls, *args, **kwargs): req = kwargs.pop('req', None) - if req is None and args and isinstance(args[0], RequestSessionMixIn): + if req is None and args and isinstance(args[0], RequestSessionBase): args = list(args) req = args.pop(0) if req is None: @@ -155,7 +156,7 @@ # # Entity prototype: # __init__(self, req, rset, row=None, col=0) - if args and isinstance(args[0], RequestSessionMixIn) or 'req' in kwargs: + if args and isinstance(args[0], RequestSessionBase) or 'req' in kwargs: super(Model, self).__init__(*args, **kwargs) self._gaeinitargs = None else: @@ -274,7 +275,7 @@ def view(self, vid, __registry='views', **kwargs): """shortcut to apply a view on this entity""" - return self.vreg[__registry]render(vid, self.req, rset=self.rset, + return self.vreg[__registry].render(vid, self.req, rset=self.rset, row=self.row, col=self.col, **kwargs) @classmethod diff -r 15d541321a8c -r 74c1597f8a82 goa/doc/quickstart.txt --- a/goa/doc/quickstart.txt Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/doc/quickstart.txt Wed Jan 20 10:13:45 2010 +0100 @@ -1,3 +1,5 @@ +.. -*- coding: utf-8 -*- + Introduction ============= @@ -11,7 +13,7 @@ application. *result set* - objet qui encaspule les résultats d'une requête adressée à l'entrepôt + objet qui encaspule les résultats d'une requête adressée à l'entrepôt de données et des informations sur cette requête. 
*vue* @@ -23,8 +25,8 @@ Définition d'une application de Blog ==================================== -La première chose à faire est de copier le squelette depuis le répertoire -``lax/skel`` vers un nouveau répertoire qui sera votre application +La première chose à faire est de copier le squelette depuis le répertoire +``lax/skel`` vers un nouveau répertoire qui sera votre application ``Google AppEngine``:: $ cp -r lax/skel myapp @@ -36,7 +38,7 @@ données manipulées. La syntaxe de la définition est la même que celle proposée par `Google AppEngine`_ mais il faut remplacer la ligne d'import:: - + from google.appengine.ext import db par celle-ci:: @@ -47,7 +49,7 @@ Un exemple de schéma de données pour un ``Blog`` pourrait être:: from cubicweb.goa import db - + class Blog(db.Model): # un titre à donner à l'entrée title = db.StringProperty(required=True) @@ -56,15 +58,15 @@ # le contenu de l'entrée content = db.TextProperty() # une entrée peut en citer une autre - cites = db.SelfReferenceProperty() - + cites = db.SelfReferenceProperty() + Personnalisation des vues ------------------------- ``LAX`` permet de générer directement, à partir de la définition -du schéma, des vues de consultation, d'ajout et de modification -pour tous les types de donées manipulés. Il est toutefois +du schéma, des vues de consultation, d'ajout et de modification +pour tous les types de donées manipulés. Il est toutefois généralement souhaitable de personnaliser les vues de consultations. Dans ``LAX``, les vues sont représentées par des classes Python. @@ -74,7 +76,7 @@ - un identifiant (tous les objets dans ``LAX`` sont enregistrés dans un registre et cet identifiant sert de clé pour y retrouver la vue) - + - une description des types de données auxquels elle s'applique Il existe dans ``LAX`` des vues prédéfinies et utilisées par le moteur @@ -86,17 +88,17 @@ Par exemple, si on souhaite modifier la page principale d'une entrée de blog, il faut surcharger la vue ``primary`` des objets ``Blog`` dans le fichier ``myapp/views.py``:: - + from cubicweb.web.views import baseviews - + class BlogPrimaryView(baseviews.PrimaryView): accepts = ('Blog',) - + def cell_call(self, row, col): - entity = self.entity(row, col) + entity = self.rset.get_entity(row, col) self.w(u'

    %s' % entity.title) self.w(u'%s
    ' entity.content) - + Génération du graphique de schéma --------------------------------- @@ -104,13 +106,13 @@ Il existe une vue ``schema`` qui permet d'afficher un graphique représantant les différents types d'entités définis dans le schéma ainsi que les relations entre ces types. Ce graphique doit être généré -statiquement. Le script à utiliser pour générer ce schéma est +statiquement. Le script à utiliser pour générer ce schéma est dans ``myapp/tools``. Ce script nécessite d'avoir accès aux bibliothèques fournies par le SDK de ``Google AppEngine``. Il faut donc modifier son PYTHONPATH:: $ export PYTHONPATH=GAE_ROOT/google:GAE_ROOT/lib/yaml - $ python tools/generate_schema_img.py + $ python tools/generate_schema_img.py Génération des fichiers de traduction diff -r 15d541321a8c -r 74c1597f8a82 goa/goactl.py --- a/goa/goactl.py Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/goactl.py Wed Jan 20 10:13:45 2010 +0100 @@ -59,21 +59,21 @@ 'cwconfig.py', 'entity.py', 'interfaces.py', + 'i18n.py', + 'mail.py', + 'migration.py', + 'mixins.py', + 'mttransforms.py', 'rqlrewrite.py', 'rset.py', 'schema.py', 'schemaviewer.py', 'selectors.py', + 'uilib.py', 'utils.py', 'vregistry.py', 'view.py', - 'common/mail.py', - 'common/migration.py', - 'common/mixins.py', - 'common/mttransforms.py', - 'common/uilib.py', - 'ext/html4zope.py', 'ext/rest.py', @@ -117,7 +117,6 @@ 'web/httpcache.py', 'web/request.py', 'web/webconfig.py', - 'web/widgets.py', 'web/views/__init__.py', 'web/views/actions.py', @@ -166,7 +165,7 @@ OVERRIDEN_FILES = ( ('toolsutils.py', 'toolsutils.py'), - ('mttransforms.py', 'common/mttransforms.py'), + ('mttransforms.py', 'mttransforms.py'), ('server__init__.py', 'server/__init__.py'), ('rqlannotation.py', 'server/rqlannotation.py'), ) @@ -210,7 +209,6 @@ create_dir(split(target)[0]) create_symlink(join(CW_SOFTWARE_ROOT, fpath), target) # overriden files - create_init_file(join(appldir, 'cubicweb/common'), 'cubicweb.common') for fpath, subfpath in OVERRIDEN_FILES: create_symlink(join(CW_SOFTWARE_ROOT, 'goa', 'overrides', fpath), join(appldir, 'cubicweb', subfpath)) @@ -225,7 +223,7 @@ join(packagesdir, include)) # generate sample config from cubicweb.goa.goaconfig import GAEConfiguration - from cubicweb.common.migration import MigrationHelper + from cubicweb.migration import MigrationHelper config = GAEConfiguration(appid, appldir) if exists(config.main_config_file()): mih = MigrationHelper(config) diff -r 15d541321a8c -r 74c1597f8a82 goa/overrides/mttransforms.py --- a/goa/overrides/mttransforms.py Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/overrides/mttransforms.py Wed Jan 20 10:13:45 2010 +0100 @@ -11,7 +11,7 @@ from logilab.mtconverter.engine import TransformEngine from logilab.mtconverter.transform import Transform -from cubicweb.common.uilib import rest_publish, html_publish, remove_html_tags +from cubicweb.uilib import rest_publish, html_publish, remove_html_tags HTML_MIMETYPES = ('text/html', 'text/xhtml', 'application/xhtml+xml') # CubicWeb specific transformations diff -r 15d541321a8c -r 74c1597f8a82 goa/skel/views.py --- a/goa/skel/views.py Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/skel/views.py Wed Jan 20 10:13:45 2010 +0100 @@ -19,7 +19,7 @@ accepts = ('BlogEntry',) def cell_call(self, row, col): - entity = self.entity(row, col) + entity = self.rset.get_entity(row, col) self.w(u'
    %s
    ' % entity.dc_title()) entity.view('metadata', w=self.w) self.w(entity.printable_value('text')) diff -r 15d541321a8c -r 74c1597f8a82 goa/test/data/views.py --- a/goa/test/data/views.py Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/test/data/views.py Wed Jan 20 10:13:45 2010 +0100 @@ -20,7 +20,7 @@ template.VariableNode.encode_output = encode_output -from cubicweb.common.view import StartupView +from cubicweb.view import StartupView INDEX_TEMPLATE = template.Template(u'''

    hellô {{ user.login }}

    diff -r 15d541321a8c -r 74c1597f8a82 goa/test/unittest_editcontroller.py --- a/goa/test/unittest_editcontroller.py Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/test/unittest_editcontroller.py Wed Jan 20 10:13:45 2010 +0100 @@ -9,8 +9,8 @@ from urllib import unquote -from cubicweb.common import ValidationError -from cubicweb.common.uilib import rql_for_eid +from cubicweb import ValidationError +from cubicweb.uilib import rql_for_eid from cubicweb.web import INTERNAL_FIELD_VALUE, Redirect @@ -401,11 +401,11 @@ # which fires a Redirect # 2/ When re-publishing the copy form, the publisher implicitly commits try: - self.env.app.publish('edit', self.req) + self.app.publish('edit', self.req) except Redirect: self.req.form['rql'] = 'Any X WHERE X eid %s' % p.eid self.req.form['vid'] = 'copy' - self.env.app.publish('view', self.req) + self.app.publish('view', self.req) rset = self.req.execute('CWUser P WHERE P surname "Boom"') self.assertEquals(len(rset), 0) finally: diff -r 15d541321a8c -r 74c1597f8a82 goa/tools/laxctl.py --- a/goa/tools/laxctl.py Wed Jan 20 10:13:02 2010 +0100 +++ b/goa/tools/laxctl.py Wed Jan 20 10:13:45 2010 +0100 @@ -18,7 +18,7 @@ from logilab.common.clcommands import Command, register_commands, main_run -from cubicweb.common.uilib import remove_html_tags +from cubicweb.uilib import remove_html_tags from cubicweb.web.views.schema import SKIP_TYPES APPLROOT = osp.abspath(osp.join(osp.dirname(osp.abspath(__file__)), '..')) diff -r 15d541321a8c -r 74c1597f8a82 hooks/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/__init__.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,1 @@ +"""core hooks""" diff -r 15d541321a8c -r 74c1597f8a82 hooks/bookmark.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/bookmark.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,30 @@ +"""bookmark related hooks + +:organization: Logilab +:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from cubicweb.server import hook + + +class AutoDeleteBookmarkOp(hook.Operation): + bookmark = None # make pylint happy + def precommit_event(self): + if not self.session.deleted_in_transaction(self.bookmark.eid): + if not self.bookmark.bookmarked_by: + self.bookmark.delete() + + +class DelBookmarkedByHook(hook.Hook): + """ensure user logins are stripped""" + __regid__ = 'autodelbookmark' + __select__ = hook.Hook.__select__ & hook.match_rtype('bookmarked_by',) + category = 'bookmark' + events = ('after_delete_relation',) + + def __call__(self): + AutoDeleteBookmarkOp(self._cw, + bookmark=self._cw.entity_from_eid(self.eidfrom)) diff -r 15d541321a8c -r 74c1597f8a82 hooks/email.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/email.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,72 @@ +"""hooks to ensure use_email / primary_email relations consistency + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from cubicweb.server import hook +from cubicweb.server.repository import ensure_card_respected + +from logilab.common.compat import any + + +class SetUseEmailRelationOp(hook.Operation): + """delay this operation to commit to avoid conflict with a late rql query + already setting the relation + """ + rtype = 'use_email' + entity = email = None # make pylint happy + + def condition(self): + """check entity has use_email set for the email address""" + return not any(e for e in self.entity.use_email + if self.email.eid == e.eid) + + def precommit_event(self): + if self.condition(): + # we've to handle cardinaly by ourselves since we're using unsafe_execute + # but use session.execute and not session.unsafe_execute to check we + # can change the relation + ensure_card_respected(self.session.execute, self.session, + self.entity.eid, self.rtype, self.email.eid) + self.session.unsafe_execute( + 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % self.rtype, + {'x': self.entity.eid, 'y': self.email.eid}, 'x') + + +class SetPrimaryEmailRelationOp(SetUseEmailRelationOp): + rtype = 'primary_email' + + def condition(self): + """check entity has no primary_email set""" + return not self.entity.primary_email + + +class SetPrimaryEmailHook(hook.Hook): + """notify when a bug or story or version has its state modified""" + __regid__ = 'setprimaryemail' + __select__ = hook.Hook.__select__ & hook.match_rtype('use_email') + category = 'email' + events = ('after_add_relation',) + + def __call__(self): + entity = self._cw.entity_from_eid(self.eidfrom) + if 'primary_email' in entity.e_schema.subject_relations(): + SetPrimaryEmailRelationOp(self._cw, entity=entity, + email=self._cw.entity_from_eid(self.eidto)) + +class SetUseEmailHook(hook.Hook): + """notify when a bug or story or version has its state modified""" + __regid__ = 'setprimaryemail' + __select__ = hook.Hook.__select__ & hook.match_rtype('primary_email') + category = 'email' + events = ('after_add_relation',) + + def __call__(self): + entity = self._cw.entity_from_eid(self.eidfrom) + if 'use_email' in entity.e_schema.subject_relations(): + SetUseEmailRelationOp(self._cw, entity=entity, + email=self._cw.entity_from_eid(self.eidto)) diff -r 15d541321a8c -r 74c1597f8a82 hooks/integrity.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/integrity.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,281 @@ +"""Core hooks: check for data integrity according to the instance'schema +validity + +:organization: Logilab +:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from cubicweb import ValidationError +from cubicweb.schema import RQLConstraint, RQLUniqueConstraint +from cubicweb.selectors import implements +from cubicweb.uilib import soup2xhtml +from cubicweb.server import hook + +# special relations that don't have to be checked for integrity, usually +# because they are handled internally by hooks (so we trust ourselves) +DONT_CHECK_RTYPES_ON_ADD = set(('owned_by', 'created_by', + 'is', 'is_instance_of', + 'wf_info_for', 'from_state', 'to_state')) +DONT_CHECK_RTYPES_ON_DEL = set(('is', 'is_instance_of', + 'wf_info_for', 'from_state', 'to_state')) + + +class _CheckRequiredRelationOperation(hook.LateOperation): + """checking relation cardinality has to be done after commit in + case the relation is being replaced + """ + eid, rtype = None, None + + def precommit_event(self): + # recheck pending eids + if self.session.deleted_in_transaction(self.eid): + return + if self.rtype in self.session.transaction_data.get('pendingrtypes', ()): + return + if self.session.unsafe_execute(*self._rql()).rowcount < 1: + etype = self.session.describe(self.eid)[0] + _ = self.session._ + msg = _('at least one relation %(rtype)s is required on %(etype)s (%(eid)s)') + msg %= {'rtype': _(self.rtype), 'etype': _(etype), 'eid': self.eid} + raise ValidationError(self.eid, {self.rtype: msg}) + + def commit_event(self): + pass + + def _rql(self): + raise NotImplementedError() + + +class _CheckSRelationOp(_CheckRequiredRelationOperation): + """check required subject relation""" + def _rql(self): + return 'Any O WHERE S eid %%(x)s, S %s O' % self.rtype, {'x': self.eid}, 'x' + + +class _CheckORelationOp(_CheckRequiredRelationOperation): + """check required object relation""" + def _rql(self): + return 'Any S WHERE O eid %%(x)s, S %s O' % self.rtype, {'x': self.eid}, 'x' + + +class IntegrityHook(hook.Hook): + __abstract__ = True + category = 'integrity' + +class UserIntegrityHook(IntegrityHook): + __abstract__ = True + __select__ = IntegrityHook.__select__ & hook.regular_session() + + +class CheckCardinalityHook(UserIntegrityHook): + """check cardinalities are satisfied""" + __regid__ = 'checkcard' + events = ('after_add_entity', 'before_delete_relation') + + def __call__(self): + getattr(self, self.event)() + + def checkrel_if_necessary(self, opcls, rtype, eid): + """check an equivalent operation has not already been added""" + for op in self._cw.pending_operations: + if isinstance(op, opcls) and op.rtype == rtype and op.eid == eid: + break + else: + opcls(self._cw, rtype=rtype, eid=eid) + + def after_add_entity(self): + eid = self.entity.eid + eschema = self.entity.e_schema + for rschema, targetschemas, role in eschema.relation_definitions(): + # skip automatically handled relations + if rschema.type in DONT_CHECK_RTYPES_ON_ADD: + continue + opcls = role == 'subject' and _CheckSRelationOp or _CheckORelationOp + rdef = rschema.role_rdef(eschema, targetschemas[0], role) + if rdef.role_cardinality(role) in '1+': + self.checkrel_if_necessary(opcls, rschema.type, eid) + + def before_delete_relation(self): + rtype = self.rtype + if rtype in DONT_CHECK_RTYPES_ON_DEL: + return + session = self._cw + eidfrom, eidto = self.eidfrom, self.eidto + card = session.schema_rproperty(rtype, eidfrom, eidto, 'cardinality') + pendingrdefs = session.transaction_data.get('pendingrdefs', ()) + if 
(session.describe(eidfrom)[0], rtype, session.describe(eidto)[0]) in pendingrdefs: + return + if card[0] in '1+' and not session.deleted_in_transaction(eidfrom): + self.checkrel_if_necessary(_CheckSRelationOp, rtype, eidfrom) + if card[1] in '1+' and not session.deleted_in_transaction(eidto): + self.checkrel_if_necessary(_CheckORelationOp, rtype, eidto) + + +class _CheckConstraintsOp(hook.LateOperation): + """check a new relation satisfy its constraints + """ + def precommit_event(self): + eidfrom, rtype, eidto = self.rdef + # first check related entities have not been deleted in the same + # transaction + if self.session.deleted_in_transaction(eidfrom): + return + if self.session.deleted_in_transaction(eidto): + return + for constraint in self.constraints: + try: + constraint.repo_check(self.session, eidfrom, rtype, eidto) + except NotImplementedError: + self.critical('can\'t check constraint %s, not supported', + constraint) + + def commit_event(self): + pass + + +class CheckConstraintHook(UserIntegrityHook): + """check the relation satisfy its constraints + + this is delayed to a precommit time operation since other relation which + will make constraint satisfied (or unsatisfied) may be added later. + """ + __regid__ = 'checkconstraint' + events = ('after_add_relation',) + + def __call__(self): + # XXX get only RQL[Unique]Constraints? + constraints = self._cw.schema_rproperty(self.rtype, self.eidfrom, self.eidto, + 'constraints') + if constraints: + _CheckConstraintsOp(self._cw, constraints=constraints, + rdef=(self.eidfrom, self.rtype, self.eidto)) + + +class CheckAttributeConstraintHook(UserIntegrityHook): + """check the attribute relation satisfy its constraints + + this is delayed to a precommit time operation since other relation which + will make constraint satisfied (or unsatisfied) may be added later. 
+ """ + __regid__ = 'checkattrconstraint' + events = ('after_add_entity', 'after_update_entity') + + def __call__(self): + eschema = self.entity.e_schema + for attr in self.entity.edited_attributes: + if eschema.subjrels[attr].final: + constraints = [c for c in eschema.rdef(attr).constraints + if isinstance(c, (RQLUniqueConstraint, RQLConstraint))] + if constraints: + _CheckConstraintsOp(self._cw, constraints=constraints, + rdef=(self.entity.eid, attr, None)) + + +class CheckUniqueHook(UserIntegrityHook): + __regid__ = 'checkunique' + events = ('before_add_entity', 'before_update_entity') + + def __call__(self): + entity = self.entity + eschema = entity.e_schema + for attr in entity.edited_attributes: + if eschema.subjrels[attr].final and eschema.has_unique_values(attr): + val = entity[attr] + if val is None: + continue + rql = '%s X WHERE X %s %%(val)s' % (entity.e_schema, attr) + rset = self._cw.unsafe_execute(rql, {'val': val}) + if rset and rset[0][0] != entity.eid: + msg = self._cw._('the value "%s" is already used, use another one') + raise ValidationError(entity.eid, {attr: msg % val}) + + +class _DelayedDeleteOp(hook.Operation): + """delete the object of composite relation except if the relation + has actually been redirected to another composite + """ + + def precommit_event(self): + session = self.session + # don't do anything if the entity is being created or deleted + if not (session.deleted_in_transaction(self.eid) or + session.added_in_transaction(self.eid)): + etype = session.describe(self.eid)[0] + session.unsafe_execute('DELETE %s X WHERE X eid %%(x)s, NOT %s' + % (etype, self.relation), + {'x': self.eid}, 'x') + + +class DeleteCompositeOrphanHook(IntegrityHook): + """delete the composed of a composite relation when this relation is deleted + """ + __regid__ = 'deletecomposite' + events = ('before_delete_relation',) + + def __call__(self): + # if the relation is being delete, don't delete composite's components + # automatically + pendingrdefs = self._cw.transaction_data.get('pendingrdefs', ()) + if (self._cw.describe(self.eidfrom)[0], self.rtype, + self._cw.describe(self.eidto)[0]) in pendingrdefs: + return + composite = self._cw.schema_rproperty(self.rtype, self.eidfrom, self.eidto, + 'composite') + if composite == 'subject': + _DelayedDeleteOp(self._cw, eid=self.eidto, + relation='Y %s X' % self.rtype) + elif composite == 'object': + _DelayedDeleteOp(self._cw, eid=self.eidfrom, + relation='X %s Y' % self.rtype) + + +class DontRemoveOwnersGroupHook(IntegrityHook): + """delete the composed of a composite relation when this relation is deleted + """ + __regid__ = 'checkownersgroup' + __select__ = IntegrityHook.__select__ & implements('CWGroup') + events = ('before_delete_entity', 'before_update_entity') + + def __call__(self): + if self.event == 'before_delete_entity' and self.entity.name == 'owners': + raise ValidationError(self.entity.eid, {None: self._cw._('can\'t be deleted')}) + elif self.event == 'before_update_entity' and 'name' in self.entity.edited_attributes: + newname = self.entity.pop('name') + oldname = self.entity.name + if oldname == 'owners' and newname != oldname: + raise ValidationError(self.entity.eid, {'name': self._cw._('can\'t be changed')}) + self.entity['name'] = newname + + +class TidyHtmlFields(UserIntegrityHook): + """tidy HTML in rich text strings""" + __regid__ = 'htmltidy' + events = ('before_add_entity', 'before_update_entity') + + def __call__(self): + entity = self.entity + metaattrs = entity.e_schema.meta_attributes() + for metaattr, 
(metadata, attr) in metaattrs.iteritems(): + if metadata == 'format' and attr in entity.edited_attributes: + try: + value = entity[attr] + except KeyError: + continue # no text to tidy + if isinstance(value, unicode): # filter out None and Binary + if getattr(entity, str(metaattr)) == 'text/html': + entity[attr] = soup2xhtml(value, self._cw.encoding) + + +class StripCWUserLoginHook(IntegrityHook): + """ensure user logins are stripped""" + __regid__ = 'stripuserlogin' + __select__ = IntegrityHook.__select__ & implements('CWUser') + events = ('before_add_entity', 'before_update_entity',) + + def __call__(self): + user = self.entity + if 'login' in user.edited_attributes and user.login: + user.login = user.login.strip() diff -r 15d541321a8c -r 74c1597f8a82 hooks/metadata.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/metadata.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,166 @@ +"""Core hooks: set generic metadata + +:organization: Logilab +:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + + +from datetime import datetime + +from cubicweb.selectors import implements +from cubicweb.server import hook +from cubicweb.server.repository import FTIndexEntityOp + + +def eschema_type_eid(session, etype): + """get eid of the CWEType entity for the given yams type""" + eschema = session.repo.schema.eschema(etype) + # eschema.eid is None if schema has been readen from the filesystem, not + # from the database (eg during tests) + if eschema.eid is None: + eschema.eid = session.unsafe_execute( + 'Any X WHERE X is CWEType, X name %(name)s', + {'name': str(etype)})[0][0] + return eschema.eid + + +class MetaDataHook(hook.Hook): + __abstract__ = True + category = 'metadata' + + +class InitMetaAttrsHook(MetaDataHook): + """before create a new entity -> set creation and modification date + + this is a conveniency hook, you shouldn't have to disable it + """ + __regid__ = 'metaattrsinit' + events = ('before_add_entity',) + + def __call__(self): + timestamp = datetime.now() + self.entity.setdefault('creation_date', timestamp) + self.entity.setdefault('modification_date', timestamp) + if not self._cw.get_shared_data('do-not-insert-cwuri'): + cwuri = u'%seid/%s' % (self._cw.base_url(), self.entity.eid) + self.entity.setdefault('cwuri', cwuri) + + +class UpdateMetaAttrsHook(MetaDataHook): + """update an entity -> set modification date""" + __regid__ = 'metaattrsupdate' + events = ('before_update_entity',) + + def __call__(self): + self.entity.setdefault('modification_date', datetime.now()) + + +class _SetCreatorOp(hook.Operation): + + def precommit_event(self): + session = self.session + if session.deleted_in_transaction(self.entity.eid): + # entity have been created and deleted in the same transaction + return + if not self.entity.created_by: + session.add_relation(self.entity.eid, 'created_by', session.user.eid) + + +class SetIsHook(MetaDataHook): + """create a new entity -> set is relation""" + __regid__ = 'setis' + events = ('after_add_entity',) + + def __call__(self): + if hasattr(self.entity, '_cw_recreating'): + return + session = self._cw + entity = self.entity + try: + #session.add_relation(entity.eid, 'is', + # eschema_type_eid(session, entity.__regid__)) + session.system_sql('INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)' + % (entity.eid, eschema_type_eid(session, 
entity.__regid__))) + except IndexError: + # during schema serialization, skip + return + for etype in entity.e_schema.ancestors() + [entity.e_schema]: + #session.add_relation(entity.eid, 'is_instance_of', + # eschema_type_eid(session, etype)) + session.system_sql('INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)' + % (entity.eid, eschema_type_eid(session, etype))) + + +class SetOwnershipHook(MetaDataHook): + """create a new entity -> set owner and creator metadata""" + __regid__ = 'setowner' + events = ('after_add_entity',) + + def __call__(self): + asession = self._cw.actual_session() + if not asession.is_internal_session: + self._cw.add_relation(self.entity.eid, 'owned_by', asession.user.eid) + _SetCreatorOp(asession, entity=self.entity) + + +class _SyncOwnersOp(hook.Operation): + def precommit_event(self): + self.session.unsafe_execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,' + 'NOT EXISTS(X owned_by U, X eid %(x)s)', + {'c': self.compositeeid, 'x': self.composedeid}, + ('c', 'x')) + + +class SyncCompositeOwner(MetaDataHook): + """when adding composite relation, the composed should have the same owners + has the composite + """ + __regid__ = 'synccompositeowner' + events = ('after_add_relation',) + + def __call__(self): + if self.rtype == 'wf_info_for': + # skip this special composite relation # XXX (syt) why? + return + eidfrom, eidto = self.eidfrom, self.eidto + composite = self._cw.schema_rproperty(self.rtype, eidfrom, eidto, 'composite') + if composite == 'subject': + _SyncOwnersOp(self._cw, compositeeid=eidfrom, composedeid=eidto) + elif composite == 'object': + _SyncOwnersOp(self._cw, compositeeid=eidto, composedeid=eidfrom) + + +class FixUserOwnershipHook(MetaDataHook): + """when a user has been created, add owned_by relation on itself""" + __regid__ = 'fixuserowner' + __select__ = MetaDataHook.__select__ & implements('CWUser') + events = ('after_add_entity',) + + def __call__(self): + self._cw.add_relation(self.entity.eid, 'owned_by', self.entity.eid) + + +class UpdateFTIHook(MetaDataHook): + """sync fulltext index when relevant relation is added / removed + """ + __regid__ = 'updateftirel' + events = ('after_add_relation', 'after_delete_relation') + + def __call__(self): + rtype = self.rtype + session = self._cw + if self.event == 'after_add_relation': + # Reindexing the contained entity is enough since it will implicitly + # reindex the container entity. + ftcontainer = session.vreg.schema.rschema(rtype).fulltext_container + if ftcontainer == 'subject': + FTIndexEntityOp(session, entity=session.entity_from_eid(self.eidto)) + elif ftcontainer == 'object': + FTIndexEntityOp(session, entity=session.entity_from_eid(self.eidfrom)) + elif session.repo.schema.rschema(rtype).fulltext_container: + FTIndexEntityOp(session, entity=session.entity_from_eid(self.eidto)) + FTIndexEntityOp(session, entity=session.entity_from_eid(self.eidfrom)) + diff -r 15d541321a8c -r 74c1597f8a82 hooks/notification.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/notification.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,185 @@ +"""some hooks to handle notification on entity's changes + +:organization: Logilab +:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from logilab.common.textutils import normalize_text + +from cubicweb import RegistryException +from cubicweb.selectors import implements +from cubicweb.server import hook +from cubicweb.sobjects.supervising import SupervisionMailOp + +class RenderAndSendNotificationView(hook.Operation): + """delay rendering of notification view until precommit""" + def precommit_event(self): + view = self.view + if view.cw_rset is not None and not view.cw_rset: + return # entity added and deleted in the same transaction (cache effect) + if view.cw_rset and self.session.deleted_in_transaction(view.cw_rset[view.cw_row or 0][view.cw_col or 0]): + return # entity added and deleted in the same transaction + self.view.render_and_send(**getattr(self, 'viewargs', {})) + + +class NotificationHook(hook.Hook): + __abstract__ = True + category = 'notification' + + def select_view(self, vid, rset, row=0, col=0): + return self._cw.vreg['views'].select_or_none(vid, self._cw, + rset=rset, row=0, col=0) + + +class StatusChangeHook(NotificationHook): + """notify when a workflowable entity has its state modified""" + __regid__ = 'notifystatuschange' + __select__ = NotificationHook.__select__ & implements('TrInfo') + events = ('after_add_entity',) + + def __call__(self): + entity = self.entity + if not entity.from_state: # not a transition + return + rset = entity.related('wf_info_for') + view = self.select_view('notif_status_change', rset=rset, row=0) + if view is None: + return + comment = entity.printable_value('comment', format='text/plain') + # XXX don't try to wrap rest until we've a proper transformation (see + # #103822) + if comment and entity.comment_format != 'text/rest': + comment = normalize_text(comment, 80) + RenderAndSendNotificationView(self._cw, view=view, viewargs={ + 'comment': comment, 'previous_state': entity.previous_state.name, + 'current_state': entity.new_state.name}) + + +class RelationChangeHook(NotificationHook): + __regid__ = 'notifyrelationchange' + events = ('before_add_relation', 'after_add_relation', + 'before_delete_relation', 'after_delete_relation') + + def __call__(self): + """if a notification view is defined for the event, send notification + email defined by the view + """ + rset = self._cw.eid_rset(self.eidfrom) + view = self.select_view('notif_%s_%s' % (self.event, self.rtype), + rset=rset, row=0) + if view is None: + return + RenderAndSendNotificationView(self._cw, view=view) + + +class EntityChangeHook(NotificationHook): + """if a notification view is defined for the event, send notification + email defined by the view + """ + __regid__ = 'notifyentitychange' + events = ('after_add_entity', 'after_update_entity') + + def __call__(self): + rset = self.entity.as_rset() + view = self.select_view('notif_%s' % self.event, rset=rset, row=0) + if view is None: + return + RenderAndSendNotificationView(self._cw, view=view) + + +class EntityUpdatedNotificationOp(hook.SingleLastOperation): + + def precommit_event(self): + session = self.session + for eid in session.transaction_data['changes']: + view = session.vreg['views'].select('notif_entity_updated', session, + rset=session.eid_rset(eid), + row=0) + RenderAndSendNotificationView(session, view=view) + + +class EntityUpdateHook(NotificationHook): + __regid__ = 'notifentityupdated' + __abstract__ = True # do not register by default + + events = 
('before_update_entity',) + skip_attrs = set() + + def __call__(self): + session = self._cw + if self.entity.eid in session.transaction_data.get('neweids', ()): + return # entity is being created + if session.is_super_session: + return # ignore changes triggered by hooks + # then compute changes + changes = session.transaction_data.setdefault('changes', {}) + thisentitychanges = changes.setdefault(self.entity.eid, set()) + attrs = [k for k in self.entity.edited_attributes if not k in self.skip_attrs] + if not attrs: + return + rqlsel, rqlrestr = [], ['X eid %(x)s'] + for i, attr in enumerate(attrs): + var = chr(65+i) + rqlsel.append(var) + rqlrestr.append('X %s %s' % (attr, var)) + rql = 'Any %s WHERE %s' % (','.join(rqlsel), ','.join(rqlrestr)) + rset = session.execute(rql, {'x': self.entity.eid}, 'x') + for i, attr in enumerate(attrs): + oldvalue = rset[0][i] + newvalue = self.entity[attr] + if oldvalue != newvalue: + thisentitychanges.add((attr, oldvalue, newvalue)) + if thisentitychanges: + EntityUpdatedNotificationOp(session) + + +# supervising ################################################################## + +class SomethingChangedHook(NotificationHook): + __regid__ = 'supervising' + events = ('before_add_relation', 'before_delete_relation', + 'after_add_entity', 'before_update_entity') + + def __call__(self): + # XXX use proper selectors + if self._cw.is_super_session or self._cw.repo.config.repairing: + return # ignore changes triggered by hooks or maintainance shell + dest = self._cw.vreg.config['supervising-addrs'] + if not dest: # no supervisors, don't do this for nothing... + return + if self._call(): + SupervisionMailOp(self._cw) + + def _call(self): + event = self.event.split('_', 1)[1] + if event == 'update_entity': + if self._cw.added_in_transaction(self.entity.eid): + return False + if self.entity.e_schema == 'CWUser': + if not (self.entity.edited_attributes - frozenset(('eid', 'modification_date', + 'last_login_time'))): + # don't record last_login_time update which are done + # automatically at login time + return False + self._cw.transaction_data.setdefault('pendingchanges', []).append( + (event, self)) + return True + + +class EntityDeleteHook(SomethingChangedHook): + __regid__ = 'supervisingentitydel' + events = ('before_delete_entity',) + + def _call(self): + try: + title = self.entity.dc_title() + except: + # may raise an error during deletion process, for instance due to + # missing required relation + title = '#%s' % eid + self._cw.transaction_data.setdefault('pendingchanges', []).append( + ('delete_entity', (self.entity.eid, str(self.entity.e_schema), title))) + return True diff -r 15d541321a8c -r 74c1597f8a82 hooks/security.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/security.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,139 @@ +"""Security hooks: check permissions to add/delete/update entities according to +the user connected to a session + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from cubicweb import Unauthorized +from cubicweb.server import BEFORE_ADD_RELATIONS, ON_COMMIT_ADD_RELATIONS, hook + + +def check_entity_attributes(session, entity): + eid = entity.eid + eschema = entity.e_schema + # ._default_set is only there on entity creation to indicate unspecified + # attributes which has been set to a default value defined in the schema + defaults = getattr(entity, '_default_set', ()) + try: + editedattrs = entity.edited_attributes + except AttributeError: + editedattrs = entity + for attr in editedattrs: + if attr in defaults: + continue + rdef = eschema.rdef(attr) + if rdef.final: # non final relation are checked by other hooks + # add/delete should be equivalent (XXX: unify them into 'update' ?) + rdef.check_perm(session, 'add', eid=eid) + + +class _CheckEntityPermissionOp(hook.LateOperation): + def precommit_event(self): + #print 'CheckEntityPermissionOp', self.session.user, self.entity, self.action + self.entity.check_perm(self.action) + check_entity_attributes(self.session, self.entity) + + def commit_event(self): + pass + + +class _CheckRelationPermissionOp(hook.LateOperation): + def precommit_event(self): + rdef = self.rschema.rdef(self.session.describe(self.eidfrom)[0], + self.session.describe(self.eidto)[0]) + rdef.check_perm(self.session, self.action, + fromeid=self.eidfrom, toeid=self.eidto) + + def commit_event(self): + pass + + +class SecurityHook(hook.Hook): + __abstract__ = True + category = 'security' + __select__ = hook.Hook.__select__ & hook.regular_session() + + +class AfterAddEntitySecurityHook(SecurityHook): + __regid__ = 'securityafteraddentity' + events = ('after_add_entity',) + + def __call__(self): + _CheckEntityPermissionOp(self._cw, entity=self.entity, action='add') + + +class AfterUpdateEntitySecurityHook(SecurityHook): + __regid__ = 'securityafterupdateentity' + events = ('after_update_entity',) + + def __call__(self): + try: + # check user has permission right now, if not retry at commit time + self.entity.check_perm('update') + check_entity_attributes(self._cw, self.entity) + except Unauthorized: + self.entity.clear_local_perm_cache('update') + _CheckEntityPermissionOp(self._cw, entity=self.entity, action='update') + + +class BeforeDelEntitySecurityHook(SecurityHook): + __regid__ = 'securitybeforedelentity' + events = ('before_delete_entity',) + + def __call__(self): + self.entity.check_perm('delete') + + +class BeforeAddRelationSecurityHook(SecurityHook): + __regid__ = 'securitybeforeaddrelation' + events = ('before_add_relation',) + + def __call__(self): + if self.rtype in BEFORE_ADD_RELATIONS: + nocheck = self._cw.transaction_data.get('skip-security', ()) + if (self.eidfrom, self.rtype, self.eidto) in nocheck: + return + rschema = self._cw.repo.schema[self.rtype] + rdef = rschema.rdef(self._cw.describe(self.eidfrom)[0], + self._cw.describe(self.eidto)[0]) + rdef.check_perm(self._cw, 'add', fromeid=self.eidfrom, toeid=self.eidto) + + +class AfterAddRelationSecurityHook(SecurityHook): + __regid__ = 'securityafteraddrelation' + events = ('after_add_relation',) + + def __call__(self): + if not self.rtype in BEFORE_ADD_RELATIONS: + nocheck = self._cw.transaction_data.get('skip-security', ()) + if (self.eidfrom, self.rtype, self.eidto) in nocheck: + return + rschema = self._cw.repo.schema[self.rtype] + if self.rtype in ON_COMMIT_ADD_RELATIONS: + 
_CheckRelationPermissionOp(self._cw, action='add', + rschema=rschema, + eidfrom=self.eidfrom, + eidto=self.eidto) + else: + rdef = rschema.rdef(self._cw.describe(self.eidfrom)[0], + self._cw.describe(self.eidto)[0]) + rdef.check_perm(self._cw, 'add', fromeid=self.eidfrom, toeid=self.eidto) + + +class BeforeDeleteRelationSecurityHook(SecurityHook): + __regid__ = 'securitybeforedelrelation' + events = ('before_delete_relation',) + + def __call__(self): + nocheck = self._cw.transaction_data.get('skip-security', ()) + if (self.eidfrom, self.rtype, self.eidto) in nocheck: + return + rschema = self._cw.repo.schema[self.rtype] + rdef = rschema.rdef(self._cw.describe(self.eidfrom)[0], + self._cw.describe(self.eidto)[0]) + rdef.check_perm(self._cw, 'delete', fromeid=self.eidfrom, toeid=self.eidto) + diff -r 15d541321a8c -r 74c1597f8a82 hooks/syncschema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/syncschema.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,1150 @@ +"""schema hooks: + +- synchronize the living schema object with the persistent schema +- perform physical update on the source when necessary + +checking for schema consistency is done in hooks.py + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from yams.schema import BASE_TYPES, RelationSchema, RelationDefinitionSchema +from yams.buildobjs import EntityType, RelationType, RelationDefinition +from yams.schema2sql import eschema2sql, rschema2sql, type_from_constraints + +from logilab.common.decorators import clear_cache + +from cubicweb import ValidationError, RepositoryError +from cubicweb.selectors import implements +from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, CONSTRAINTS +from cubicweb.server import hook, schemaserial as ss +from cubicweb.server.sqlutils import SQL_PREFIX + + +TYPE_CONVERTER = { # XXX + 'Boolean': bool, + 'Int': int, + 'Float': float, + 'Password': str, + 'String': unicode, + 'Date' : unicode, + 'Datetime' : unicode, + 'Time' : unicode, + } + +# core entity and relation types which can't be removed +CORE_ETYPES = list(BASE_TYPES) + ['CWEType', 'CWRType', 'CWUser', 'CWGroup', + 'CWConstraint', 'CWAttribute', 'CWRelation'] +CORE_RTYPES = ['eid', 'creation_date', 'modification_date', 'cwuri', + 'login', 'upassword', 'name', + 'is', 'instanceof', 'owned_by', 'created_by', 'in_group', + 'relation_type', 'from_entity', 'to_entity', + 'constrainted_by', + 'read_permission', 'add_permission', + 'delete_permission', 'updated_permission', + ] + +def get_constraints(session, entity): + constraints = [] + for cstreid in session.transaction_data.get(entity.eid, ()): + cstrent = session.entity_from_eid(cstreid) + cstr = CONSTRAINTS[cstrent.type].deserialize(cstrent.value) + cstr.eid = cstreid + constraints.append(cstr) + return constraints + +def group_mapping(cw): + try: + return cw.transaction_data['groupmap'] + except KeyError: + cw.transaction_data['groupmap'] = gmap = ss.group_mapping(cw) + return gmap + +def add_inline_relation_column(session, etype, rtype): + """add necessary column and index for an inlined relation""" + table = SQL_PREFIX + etype + column = SQL_PREFIX + rtype + try: + session.system_sql(str('ALTER TABLE %s ADD COLUMN %s integer' + % (table, column)), rollback_on_failure=False) + session.info('added column %s to table %s', column, table) + except: + 
# silent exception here, if this error has not been raised because the + # column already exists, index creation will fail anyway + session.exception('error while adding column %s to table %s', + table, column) + # create index before alter table which may expectingly fail during test + # (sqlite) while index creation should never fail (test for index existence + # is done by the dbhelper) + session.pool.source('system').create_index(session, table, column) + session.info('added index on %s(%s)', table, column) + session.transaction_data.setdefault('createdattrs', []).append( + '%s.%s' % (etype, rtype)) + +def check_valid_changes(session, entity, ro_attrs=('name', 'final')): + errors = {} + # don't use getattr(entity, attr), we would get the modified value if any + for attr in entity.edited_attributes: + if attr in ro_attrs: + newval = entity.pop(attr) + origval = getattr(entity, attr) + if newval != origval: + errors[attr] = session._("can't change the %s attribute") % \ + display_name(session, attr) + entity[attr] = newval + if errors: + raise ValidationError(entity.eid, errors) + + +# operations for low-level database alteration ################################ + +class DropTable(hook.Operation): + """actually remove a database from the instance's schema""" + table = None # make pylint happy + def precommit_event(self): + dropped = self.session.transaction_data.setdefault('droppedtables', + set()) + if self.table in dropped: + return # already processed + dropped.add(self.table) + self.session.system_sql('DROP TABLE %s' % self.table) + self.info('dropped table %s', self.table) + + +class DropRelationTable(DropTable): + def __init__(self, session, rtype): + super(DropRelationTable, self).__init__( + session, table='%s_relation' % rtype) + session.transaction_data.setdefault('pendingrtypes', set()).add(rtype) + + +class DropColumn(hook.Operation): + """actually remove the attribut's column from entity table in the system + database + """ + table = column = None # make pylint happy + def precommit_event(self): + session, table, column = self.session, self.table, self.column + # drop index if any + session.pool.source('system').drop_index(session, table, column) + try: + session.system_sql('ALTER TABLE %s DROP COLUMN %s' + % (table, column), rollback_on_failure=False) + self.info('dropped column %s from table %s', column, table) + except Exception, ex: + # not supported by sqlite for instance + self.error('error while altering table %s: %s', table, ex) + + +# base operations for in-memory schema synchronization ######################## + +class MemSchemaNotifyChanges(hook.SingleLastOperation): + """the update schema operation: + + special operation which should be called once and after all other schema + operations. It will trigger internal structures rebuilding to consider + schema changes. 
+ """ + + def __init__(self, session): + hook.SingleLastOperation.__init__(self, session) + + def precommit_event(self): + for eschema in self.session.repo.schema.entities(): + if not eschema.final: + clear_cache(eschema, 'ordered_relations') + + def commit_event(self): + rebuildinfered = self.session.data.get('rebuild-infered', True) + repo = self.session.repo + repo.set_schema(repo.schema, rebuildinfered=rebuildinfered) + # CWUser class might have changed, update current session users + cwuser_cls = self.session.vreg['etypes'].etype_class('CWUser') + for session in repo._sessions.values(): + session.user.__class__ = cwuser_cls + + def rollback_event(self): + self.precommit_event() + + +class MemSchemaOperation(hook.Operation): + """base class for schema operations""" + def __init__(self, session, kobj=None, **kwargs): + self.kobj = kobj + # once Operation.__init__ has been called, event may be triggered, so + # do this last ! + hook.Operation.__init__(self, session, **kwargs) + # every schema operation is triggering a schema update + MemSchemaNotifyChanges(session) + + def prepare_constraints(self, subjtype, rtype, objtype): + rdef = rtype.rdef(subjtype, objtype) + constraints = rdef.constraints + self.constraints = list(constraints) + rdef.constraints = self.constraints + + +class MemSchemaEarlyOperation(MemSchemaOperation): + def insert_index(self): + """schema operation which are inserted at the begining of the queue + (typically to add/remove entity or relation types) + """ + i = -1 + for i, op in enumerate(self.session.pending_operations): + if not isinstance(op, MemSchemaEarlyOperation): + return i + return i + 1 + + +# operations for high-level source database alteration ######################## + +class SourceDbCWETypeRename(hook.Operation): + """this operation updates physical storage accordingly""" + oldname = newname = None # make pylint happy + + def precommit_event(self): + # we need sql to operate physical changes on the system database + sqlexec = self.session.system_sql + sqlexec('ALTER TABLE %s%s RENAME TO %s%s' % (SQL_PREFIX, self.oldname, + SQL_PREFIX, self.newname)) + self.info('renamed table %s to %s', self.oldname, self.newname) + sqlexec('UPDATE entities SET type=%s WHERE type=%s', + (self.newname, self.oldname)) + sqlexec('UPDATE deleted_entities SET type=%s WHERE type=%s', + (self.newname, self.oldname)) + + +class SourceDbCWRTypeUpdate(hook.Operation): + """actually update some properties of a relation definition""" + rschema = values = entity = None # make pylint happy + + def precommit_event(self): + session = self.session + rschema = self.rschema + if rschema.final or not 'inlined' in self.values: + return # nothing to do + inlined = self.values['inlined'] + entity = self.entity + # check in-lining is necessary / possible + if not entity.inlined_changed(inlined): + return # nothing to do + # inlined changed, make necessary physical changes! 
+ sqlexec = self.session.system_sql + rtype = rschema.type + eidcolumn = SQL_PREFIX + 'eid' + if not inlined: + # need to create the relation if it has not been already done by + # another event of the same transaction + if not rschema.type in session.transaction_data.get('createdtables', ()): + tablesql = rschema2sql(rschema) + # create the necessary table + for sql in tablesql.split(';'): + if sql.strip(): + sqlexec(sql) + session.transaction_data.setdefault('createdtables', []).append( + rschema.type) + # copy existant data + column = SQL_PREFIX + rtype + for etype in rschema.subjects(): + table = SQL_PREFIX + str(etype) + sqlexec('INSERT INTO %s_relation SELECT %s, %s FROM %s WHERE NOT %s IS NULL' + % (rtype, eidcolumn, column, table, column)) + # drop existant columns + for etype in rschema.subjects(): + DropColumn(session, table=SQL_PREFIX + str(etype), + column=SQL_PREFIX + rtype) + else: + for etype in rschema.subjects(): + try: + add_inline_relation_column(session, str(etype), rtype) + except Exception, ex: + # the column probably already exists. this occurs when the + # entity's type has just been added or if the column has not + # been previously dropped + self.error('error while altering table %s: %s', etype, ex) + # copy existant data. + # XXX don't use, it's not supported by sqlite (at least at when i tried it) + #sqlexec('UPDATE %(etype)s SET %(rtype)s=eid_to ' + # 'FROM %(rtype)s_relation ' + # 'WHERE %(etype)s.eid=%(rtype)s_relation.eid_from' + # % locals()) + table = SQL_PREFIX + str(etype) + cursor = sqlexec('SELECT eid_from, eid_to FROM %(table)s, ' + '%(rtype)s_relation WHERE %(table)s.%(eidcolumn)s=' + '%(rtype)s_relation.eid_from' % locals()) + args = [{'val': eid_to, 'x': eid} for eid, eid_to in cursor.fetchall()] + if args: + column = SQL_PREFIX + rtype + cursor.executemany('UPDATE %s SET %s=%%(val)s WHERE %s=%%(x)s' + % (table, column, eidcolumn), args) + # drop existant table + DropRelationTable(session, rtype) + + +class SourceDbCWAttributeAdd(hook.Operation): + """an attribute relation (CWAttribute) has been added: + * add the necessary column + * set default on this column if any and possible + * register an operation to add the relation definition to the + instance's schema on commit + + constraints are handled by specific hooks + """ + entity = None # make pylint happy + + def init_rdef(self, **kwargs): + entity = self.entity + fromentity = entity.stype + self.session.execute('SET X ordernum Y+1 ' + 'WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, ' + 'X ordernum >= %(order)s, NOT X eid %(x)s', + {'x': entity.eid, 'se': fromentity.eid, + 'order': entity.ordernum or 0}) + subj = str(fromentity.name) + rtype = entity.rtype.name + obj = str(entity.otype.name) + constraints = get_constraints(self.session, entity) + rdef = RelationDefinition(subj, rtype, obj, + description=entity.description, + cardinality=entity.cardinality, + constraints=constraints, + order=entity.ordernum, + eid=entity.eid, + **kwargs) + MemSchemaRDefAdd(self.session, rdef) + return rdef + + def precommit_event(self): + session = self.session + entity = self.entity + # entity.defaultval is a string or None, but we need a correctly typed + # value + default = entity.defaultval + if default is not None: + default = TYPE_CONVERTER[entity.otype.name](default) + props = {'default': default, + 'indexed': entity.indexed, + 'fulltextindexed': entity.fulltextindexed, + 'internationalizable': entity.internationalizable} + rdef = self.init_rdef(**props) + sysource = session.pool.source('system') + 
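        # derive the SQL column type from the attribute's yams type and its
        # constraints (e.g. a size-constrained String typically becomes a
        # VARCHAR of that size, an unconstrained one a TEXT column)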
attrtype = type_from_constraints(sysource.dbhelper, rdef.object, + rdef.constraints) + # XXX should be moved somehow into lgc.adbh: sqlite doesn't support to + # add a new column with UNIQUE, it should be added after the ALTER TABLE + # using ADD INDEX + if sysource.dbdriver == 'sqlite' and 'UNIQUE' in attrtype: + extra_unique_index = True + attrtype = attrtype.replace(' UNIQUE', '') + else: + extra_unique_index = False + # added some str() wrapping query since some backend (eg psycopg) don't + # allow unicode queries + table = SQL_PREFIX + rdef.subject + column = SQL_PREFIX + rdef.name + try: + session.system_sql(str('ALTER TABLE %s ADD COLUMN %s %s' + % (table, column, attrtype)), + rollback_on_failure=False) + self.info('added column %s to table %s', table, column) + except Exception, ex: + # the column probably already exists. this occurs when + # the entity's type has just been added or if the column + # has not been previously dropped + self.error('error while altering table %s: %s', table, ex) + if extra_unique_index or entity.indexed: + try: + sysource.create_index(session, table, column, + unique=extra_unique_index) + except Exception, ex: + self.error('error while creating index for %s.%s: %s', + table, column, ex) + # final relations are not infered, propagate + try: + eschema = session.vreg.schema.eschema(rdef.subject) + except KeyError: + return # entity type currently being added + # propagate attribute to children classes + rschema = session.vreg.schema.rschema(rdef.name) + # if relation type has been inserted in the same transaction, its final + # attribute is still set to False, so we've to ensure it's False + rschema.final = True + # XXX 'infered': True/False, not clear actually + props.update({'constraints': rdef.constraints, + 'description': rdef.description, + 'cardinality': rdef.cardinality, + 'constraints': rdef.constraints, + 'permissions': rdef.get_permissions(), + 'order': rdef.order}) + groupmap = group_mapping(session) + for specialization in eschema.specialized_by(False): + if (specialization, rdef.object) in rschema.rdefs: + continue + sperdef = RelationDefinitionSchema(specialization, rschema, rdef.object, props) + for rql, args in ss.rdef2rql(rschema, str(specialization), + rdef.object, sperdef, groupmap=groupmap): + session.execute(rql, args) + # set default value, using sql for performance and to avoid + # modification_date update + if default: + session.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column), + {'default': default}) + + +class SourceDbCWRelationAdd(SourceDbCWAttributeAdd): + """an actual relation has been added: + * if this is an inlined relation, add the necessary column + else if it's the first instance of this relation type, add the + necessary table and set default permissions + * register an operation to add the relation definition to the + instance's schema on commit + + constraints are handled by specific hooks + """ + entity = None # make pylint happy + + def precommit_event(self): + session = self.session + entity = self.entity + rdef = self.init_rdef(composite=entity.composite) + schema = session.vreg.schema + rtype = rdef.name + rschema = session.vreg.schema.rschema(rtype) + # this have to be done before permissions setting + if rschema.inlined: + # need to add a column if the relation is inlined and if this is the + # first occurence of "Subject relation Something" whatever Something + # and if it has not been added during other event of the same + # transaction + key = '%s.%s' % (rdef.subject, rtype) + try: + 
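            # rschema.objects() raises KeyError when the relation has no
            # definition for this subject type yet, meaning the inlined
            # column cannot already exist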
alreadythere = bool(rschema.objects(rdef.subject)) + except KeyError: + alreadythere = False + if not (alreadythere or + key in session.transaction_data.get('createdattrs', ())): + add_inline_relation_column(session, rdef.subject, rtype) + else: + # need to create the relation if no relation definition in the + # schema and if it has not been added during other event of the same + # transaction + if not (rschema.subjects() or + rtype in session.transaction_data.get('createdtables', ())): + try: + rschema = session.vreg.schema.rschema(rtype) + tablesql = rschema2sql(rschema) + except KeyError: + # fake we add it to the schema now to get a correctly + # initialized schema but remove it before doing anything + # more dangerous... + rschema = session.vreg.schema.add_relation_type(rdef) + tablesql = rschema2sql(rschema) + session.vreg.schema.del_relation_type(rtype) + # create the necessary table + for sql in tablesql.split(';'): + if sql.strip(): + session.system_sql(sql) + session.transaction_data.setdefault('createdtables', []).append( + rtype) + + +class SourceDbRDefUpdate(hook.Operation): + """actually update some properties of a relation definition""" + rschema = values = None # make pylint happy + + def precommit_event(self): + etype = self.kobj[0] + table = SQL_PREFIX + etype + column = SQL_PREFIX + self.rschema.type + if 'indexed' in self.values: + sysource = self.session.pool.source('system') + if self.values['indexed']: + sysource.create_index(self.session, table, column) + else: + sysource.drop_index(self.session, table, column) + if 'cardinality' in self.values and self.rschema.final: + adbh = self.session.pool.source('system').dbhelper + if not adbh.alter_column_support: + # not supported (and NOT NULL not set by yams in that case, so + # no worry) + return + atype = self.rschema.objects(etype)[0] + constraints = self.rschema.rdef(etype, atype).constraints + coltype = type_from_constraints(adbh, atype, constraints, + creating=False) + # XXX check self.values['cardinality'][0] actually changed? 
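            # a subject cardinality of '1' means the attribute is required,
            # hence the column must be NOT NULL; anything else allows NULL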
+ sql = adbh.sql_set_null_allowed(table, column, coltype, + self.values['cardinality'][0] != '1') + self.session.system_sql(sql) + + +class SourceDbCWConstraintAdd(hook.Operation): + """actually update constraint of a relation definition""" + entity = None # make pylint happy + cancelled = False + + def precommit_event(self): + rdef = self.entity.reverse_constrained_by[0] + session = self.session + # when the relation is added in the same transaction, the constraint + # object is created by the operation adding the attribute or relation, + # so there is nothing to do here + if session.added_in_transaction(rdef.eid): + return + rdefschema = session.vreg.schema.schema_by_eid(rdef.eid) + subjtype, rtype, objtype = rdefschema.as_triple() + cstrtype = self.entity.type + oldcstr = rtype.rdef(subjtype, objtype).constraint_by_type(cstrtype) + newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) + table = SQL_PREFIX + str(subjtype) + column = SQL_PREFIX + str(rtype) + # alter the physical schema on size constraint changes + if newcstr.type() == 'SizeConstraint' and ( + oldcstr is None or oldcstr.max != newcstr.max): + adbh = self.session.pool.source('system').dbhelper + card = rtype.rdef(subjtype, objtype).cardinality + coltype = type_from_constraints(adbh, objtype, [newcstr], + creating=False) + sql = adbh.sql_change_col_type(table, column, coltype, card != '1') + try: + session.system_sql(sql, rollback_on_failure=False) + self.info('altered column %s of table %s: now VARCHAR(%s)', + column, table, newcstr.max) + except Exception, ex: + # not supported by sqlite for instance + self.error('error while altering table %s: %s', table, ex) + elif cstrtype == 'UniqueConstraint' and oldcstr is None: + session.pool.source('system').create_index( + self.session, table, column, unique=True) + + +class SourceDbCWConstraintDel(hook.Operation): + """actually remove a constraint of a relation definition""" + rtype = subjtype = objtype = None # make pylint happy + + def precommit_event(self): + cstrtype = self.cstr.type() + table = SQL_PREFIX + str(self.subjtype) + column = SQL_PREFIX + str(self.rtype) + # alter the physical schema on size/unique constraint changes + if cstrtype == 'SizeConstraint': + try: + self.session.system_sql('ALTER TABLE %s ALTER COLUMN %s TYPE TEXT' + % (table, column), + rollback_on_failure=False) + self.info('altered column %s of table %s: now TEXT', + column, table) + except Exception, ex: + # not supported by sqlite for instance + self.error('error while altering table %s: %s', table, ex) + elif cstrtype == 'UniqueConstraint': + self.session.pool.source('system').drop_index( + self.session, table, column, unique=True) + + +# operations for in-memory schema synchronization ############################# + +class MemSchemaCWETypeAdd(MemSchemaEarlyOperation): + """actually add the entity type to the instance's schema""" + eid = None # make pylint happy + def commit_event(self): + self.session.vreg.schema.add_entity_type(self.kobj) + + +class MemSchemaCWETypeRename(MemSchemaOperation): + """this operation updates physical storage accordingly""" + oldname = newname = None # make pylint happy + + def commit_event(self): + self.session.vreg.schema.rename_entity_type(self.oldname, self.newname) + + +class MemSchemaCWETypeDel(MemSchemaOperation): + """actually remove the entity type from the instance's schema""" + def commit_event(self): + try: + # del_entity_type also removes entity's relations + self.session.vreg.schema.del_entity_type(self.kobj) + except KeyError: + # s/o entity 
type have already been deleted + pass + + +class MemSchemaCWRTypeAdd(MemSchemaEarlyOperation): + """actually add the relation type to the instance's schema""" + eid = None # make pylint happy + def commit_event(self): + self.session.vreg.schema.add_relation_type(self.kobj) + + +class MemSchemaCWRTypeUpdate(MemSchemaOperation): + """actually update some properties of a relation definition""" + rschema = values = None # make pylint happy + + def commit_event(self): + # structure should be clean, not need to remove entity's relations + # at this point + self.rschema.__dict__.update(self.values) + + +class MemSchemaCWRTypeDel(MemSchemaOperation): + """actually remove the relation type from the instance's schema""" + def commit_event(self): + try: + self.session.vreg.schema.del_relation_type(self.kobj) + except KeyError: + # s/o entity type have already been deleted + pass + + +class MemSchemaRDefAdd(MemSchemaEarlyOperation): + """actually add the attribute relation definition to the instance's + schema + """ + def commit_event(self): + self.session.vreg.schema.add_relation_def(self.kobj) + + +class MemSchemaRDefUpdate(MemSchemaOperation): + """actually update some properties of a relation definition""" + rschema = values = None # make pylint happy + + def commit_event(self): + # structure should be clean, not need to remove entity's relations + # at this point + self.rschema.rdefs[self.kobj].update(self.values) + + +class MemSchemaRDefDel(MemSchemaOperation): + """actually remove the relation definition from the instance's schema""" + def commit_event(self): + subjtype, rtype, objtype = self.kobj + try: + self.session.vreg.schema.del_relation_def(subjtype, rtype, objtype) + except KeyError: + # relation type may have been already deleted + pass + + +class MemSchemaCWConstraintAdd(MemSchemaOperation): + """actually update constraint of a relation definition + + has to be called before SourceDbCWConstraintAdd + """ + cancelled = False + + def precommit_event(self): + rdef = self.entity.reverse_constrained_by[0] + # when the relation is added in the same transaction, the constraint + # object is created by the operation adding the attribute or relation, + # so there is nothing to do here + if self.session.added_in_transaction(rdef.eid): + self.cancelled = True + return + rdef = self.session.vreg.schema.schema_by_eid(rdef.eid) + subjtype, rtype, objtype = rdef.as_triple() + self.prepare_constraints(subjtype, rtype, objtype) + cstrtype = self.entity.type + self.cstr = rtype.rdef(subjtype, objtype).constraint_by_type(cstrtype) + self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) + self.newcstr.eid = self.entity.eid + + def commit_event(self): + if self.cancelled: + return + # in-place modification + if not self.cstr is None: + self.constraints.remove(self.cstr) + self.constraints.append(self.newcstr) + + +class MemSchemaCWConstraintDel(MemSchemaOperation): + """actually remove a constraint of a relation definition + + has to be called before SourceDbCWConstraintDel + """ + rtype = subjtype = objtype = None # make pylint happy + def precommit_event(self): + self.prepare_constraints(self.subjtype, self.rtype, self.objtype) + + def commit_event(self): + self.constraints.remove(self.cstr) + + +class MemSchemaPermissionAdd(MemSchemaOperation): + """synchronize schema when a *_permission relation has been added on a group + """ + + def commit_event(self): + """the observed connections pool has been commited""" + try: + erschema = self.session.vreg.schema.schema_by_eid(self.eid) + except 
KeyError: + # duh, schema not found, log error and skip operation + self.error('no schema for %s', self.eid) + return + perms = list(erschema.action_permissions(self.action)) + if hasattr(self, 'group_eid'): + perm = self.session.entity_from_eid(self.group_eid).name + else: + perm = erschema.rql_expression(self.expr) + try: + perms.index(perm) + self.warning('%s already in permissions for %s on %s', + perm, self.action, erschema) + except ValueError: + perms.append(perm) + erschema.set_action_permissions(self.action, perms) + + +class MemSchemaPermissionDel(MemSchemaPermissionAdd): + """synchronize schema when a *_permission relation has been deleted from a + group + """ + + def commit_event(self): + """the observed connections pool has been commited""" + try: + erschema = self.session.vreg.schema.schema_by_eid(self.eid) + except KeyError: + # duh, schema not found, log error and skip operation + self.error('no schema for %s', self.eid) + return + if isinstance(erschema, RelationSchema): # XXX 3.6 migration + return + perms = list(erschema.action_permissions(self.action)) + if hasattr(self, 'group_eid'): + perm = self.session.entity_from_eid(self.group_eid).name + else: + perm = erschema.rql_expression(self.expr) + try: + perms.remove(perm) + erschema.set_action_permissions(self.action, perms) + except ValueError: + self.error('can\'t remove permission %s for %s on %s', + perm, self.action, erschema) + + +class MemSchemaSpecializesAdd(MemSchemaOperation): + + def commit_event(self): + eschema = self.session.vreg.schema.schema_by_eid(self.etypeeid) + parenteschema = self.session.vreg.schema.schema_by_eid(self.parentetypeeid) + eschema._specialized_type = parenteschema.type + parenteschema._specialized_by.append(eschema.type) + + +class MemSchemaSpecializesDel(MemSchemaOperation): + + def commit_event(self): + try: + eschema = self.session.vreg.schema.schema_by_eid(self.etypeeid) + parenteschema = self.session.vreg.schema.schema_by_eid(self.parentetypeeid) + except KeyError: + # etype removed, nothing to do + return + eschema._specialized_type = None + parenteschema._specialized_by.remove(eschema.type) + + +class SyncSchemaHook(hook.Hook): + __abstract__ = True + category = 'syncschema' + + +# CWEType hooks ################################################################ + +class DelCWETypeHook(SyncSchemaHook): + """before deleting a CWEType entity: + * check that we don't remove a core entity type + * cascade to delete related CWAttribute and CWRelation entities + * instantiate an operation to delete the entity type on commit + """ + __regid__ = 'syncdelcwetype' + __select__ = SyncSchemaHook.__select__ & implements('CWEType') + events = ('before_delete_entity',) + + def __call__(self): + # final entities can't be deleted, don't care about that + name = self.entity.name + if name in CORE_ETYPES: + raise ValidationError(self.entity.eid, {None: self._cw._('can\'t be deleted')}) + # delete every entities of this type + self._cw.unsafe_execute('DELETE %s X' % name) + DropTable(self._cw, table=SQL_PREFIX + name) + MemSchemaCWETypeDel(self._cw, name) + + +class AfterDelCWETypeHook(DelCWETypeHook): + __regid__ = 'wfcleanup' + events = ('after_delete_entity',) + + def __call__(self): + # workflow cleanup + self._cw.execute('DELETE Workflow X WHERE NOT X workflow_of Y') + + +class AfterAddCWETypeHook(DelCWETypeHook): + """after adding a CWEType entity: + * create the necessary table + * set creation_date and modification_date by creating the necessary + CWAttribute entities + * add owned_by relation 
by creating the necessary CWRelation entity + * register an operation to add the entity type to the instance's + schema on commit + """ + __regid__ = 'syncaddcwetype' + events = ('after_add_entity',) + + def __call__(self): + entity = self.entity + if entity.get('final'): + return + schema = self._cw.vreg.schema + name = entity['name'] + etype = EntityType(name=name, description=entity.get('description'), + meta=entity.get('meta')) # don't care about final + # fake we add it to the schema now to get a correctly initialized schema + # but remove it before doing anything more dangerous... + schema = self._cw.vreg.schema + eschema = schema.add_entity_type(etype) + # generate table sql and rql to add metadata + tablesql = eschema2sql(self._cw.pool.source('system').dbhelper, eschema, + prefix=SQL_PREFIX) + relrqls = [] + for rtype in (META_RTYPES - VIRTUAL_RTYPES): + rschema = schema[rtype] + sampletype = rschema.subjects()[0] + desttype = rschema.objects()[0] + props = rschema.rdef(sampletype, desttype) + relrqls += list(ss.rdef2rql(rschema, name, desttype, props, + groupmap=group_mapping(self._cw))) + # now remove it ! + schema.del_entity_type(name) + # create the necessary table + for sql in tablesql.split(';'): + if sql.strip(): + self._cw.system_sql(sql) + # register operation to modify the schema on commit + # this have to be done before adding other relations definitions + # or permission settings + etype.eid = entity.eid + MemSchemaCWETypeAdd(self._cw, etype) + # add meta relations + for rql, kwargs in relrqls: + self._cw.execute(rql, kwargs) + + +class BeforeUpdateCWETypeHook(DelCWETypeHook): + """check name change, handle final""" + __regid__ = 'syncupdatecwetype' + events = ('before_update_entity',) + + def __call__(self): + entity = self.entity + check_valid_changes(self._cw, entity, ro_attrs=('final',)) + # don't use getattr(entity, attr), we would get the modified value if any + if 'name' in entity.edited_attributes: + newname = entity.pop('name') + oldname = entity.name + if newname.lower() != oldname.lower(): + SourceDbCWETypeRename(self._cw, oldname=oldname, newname=newname) + MemSchemaCWETypeRename(self._cw, oldname=oldname, newname=newname) + entity['name'] = newname + + +# CWRType hooks ################################################################ + +class DelCWRTypeHook(SyncSchemaHook): + """before deleting a CWRType entity: + * check that we don't remove a core relation type + * cascade to delete related CWAttribute and CWRelation entities + * instantiate an operation to delete the relation type on commit + """ + __regid__ = 'syncdelcwrtype' + __select__ = SyncSchemaHook.__select__ & implements('CWRType') + events = ('before_delete_entity',) + + def __call__(self): + name = self.entity.name + if name in CORE_RTYPES: + raise ValidationError(self.entity.eid, {None: self._cw._('can\'t be deleted')}) + # delete relation definitions using this relation type + self._cw.execute('DELETE CWAttribute X WHERE X relation_type Y, Y eid %(x)s', + {'x': self.entity.eid}) + self._cw.execute('DELETE CWRelation X WHERE X relation_type Y, Y eid %(x)s', + {'x': self.entity.eid}) + MemSchemaCWRTypeDel(self._cw, name) + + +class AfterAddCWRTypeHook(DelCWRTypeHook): + """after a CWRType entity has been added: + * register an operation to add the relation type to the instance's + schema on commit + + We don't know yet this point if a table is necessary + """ + __regid__ = 'syncaddcwrtype' + events = ('after_add_entity',) + + def __call__(self): + entity = self.entity + rtype = 
RelationType(name=entity.name, + description=entity.get('description'), + meta=entity.get('meta', False), + inlined=entity.get('inlined', False), + symetric=entity.get('symetric', False), + eid=entity.eid) + MemSchemaCWRTypeAdd(self._cw, rtype) + + +class BeforeUpdateCWRTypeHook(DelCWRTypeHook): + """check name change, handle final""" + __regid__ = 'checkupdatecwrtype' + events = ('before_update_entity',) + + def __call__(self): + check_valid_changes(self._cw, self.entity) + + +class AfterUpdateCWRTypeHook(DelCWRTypeHook): + __regid__ = 'syncupdatecwrtype' + events = ('after_update_entity',) + + def __call__(self): + entity = self.entity + rschema = self._cw.vreg.schema.rschema(entity.name) + newvalues = {} + for prop in ('meta', 'symetric', 'inlined'): + if prop in entity: + newvalues[prop] = entity[prop] + if newvalues: + MemSchemaCWRTypeUpdate(self._cw, rschema=rschema, values=newvalues) + SourceDbCWRTypeUpdate(self._cw, rschema=rschema, values=newvalues, + entity=entity) + +def check_valid_changes(session, entity, ro_attrs=('name', 'final')): + errors = {} + # don't use getattr(entity, attr), we would get the modified value if any + for attr in ro_attrs: + if attr in entity.edited_attributes: + origval, newval = hook.entity_oldnewvalue(entity, attr) + if newval != origval: + errors[attr] = session._("can't change the %s attribute") % \ + display_name(session, attr) + if errors: + raise ValidationError(entity.eid, errors) + + +class AfterDelRelationTypeHook(SyncSchemaHook): + """before deleting a CWAttribute or CWRelation entity: + * if this is a final or inlined relation definition, instantiate an + operation to drop necessary column, else if this is the last instance + of a non final relation, instantiate an operation to drop necessary + table + * instantiate an operation to delete the relation definition on commit + * delete the associated relation type when necessary + """ + __regid__ = 'syncdelrelationtype' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype('relation_type') + events = ('after_delete_relation',) + + def __call__(self): + session = self._cw + rdef = session.vreg.schema.schema_by_eid(self.eidfrom) + subjschema, rschema, objschema = rdef.as_triple() + pendings = session.transaction_data.get('pendingeids', ()) + pendingrdefs = session.transaction_data.setdefault('pendingrdefs', set()) + # first delete existing relation if necessary + if rschema.final: + rdeftype = 'CWAttribute' + pendingrdefs.add((subjschema, rschema)) + else: + rdeftype = 'CWRelation' + pendingrdefs.add((subjschema, rschema, objschema)) + if not (subjschema.eid in pendings or objschema.eid in pendings): + session.execute('DELETE X %s Y WHERE X is %s, Y is %s' + % (rschema, subjschema, objschema)) + execute = session.unsafe_execute + rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,' + 'R eid %%(x)s' % rdeftype, {'x': self.eidto}) + lastrel = rset[0][0] == 0 + # we have to update physical schema systematically for final and inlined + # relations, but only if it's the last instance for this relation type + # for other relations + + if (rschema.final or rschema.inlined): + rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, ' + 'R eid %%(x)s, X from_entity E, E name %%(name)s' + % rdeftype, {'x': self.eidto, 'name': str(subjschema)}) + if rset[0][0] == 0 and not subjschema.eid in pendings: + ptypes = session.transaction_data.setdefault('pendingrtypes', set()) + ptypes.add(rschema.type) + DropColumn(session, table=SQL_PREFIX + subjschema.type, + column=SQL_PREFIX + 
rschema.type) + elif lastrel: + DropRelationTable(session, rschema.type) + # if this is the last instance, drop associated relation type + if lastrel and not self.eidto in pendings: + execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto}, 'x') + MemSchemaRDefDel(session, (subjschema, rschema, objschema)) + + +# CWAttribute / CWRelation hooks ############################################### + +class AfterAddCWAttributeHook(SyncSchemaHook): + __regid__ = 'syncaddcwattribute' + __select__ = SyncSchemaHook.__select__ & implements('CWAttribute') + events = ('after_add_entity',) + + def __call__(self): + SourceDbCWAttributeAdd(self._cw, entity=self.entity) + + +class AfterAddCWRelationHook(AfterAddCWAttributeHook): + __regid__ = 'syncaddcwrelation' + __select__ = SyncSchemaHook.__select__ & implements('CWRelation') + + def __call__(self): + SourceDbCWRelationAdd(self._cw, entity=self.entity) + + +class AfterUpdateCWRDefHook(SyncSchemaHook): + __regid__ = 'syncaddcwattribute' + __select__ = SyncSchemaHook.__select__ & implements('CWAttribute', + 'CWRelation') + events = ('after_update_entity',) + + def __call__(self): + entity = self.entity + if self._cw.deleted_in_transaction(entity.eid): + return + desttype = entity.otype.name + rschema = self._cw.vreg.schema[entity.rtype.name] + newvalues = {} + for prop in RelationDefinitionSchema.rproperty_defs(desttype): + if prop == 'constraints': + continue + if prop == 'order': + prop = 'ordernum' + if prop in entity.edited_attributes: + newvalues[prop] = entity[prop] + if newvalues: + subjtype = entity.stype.name + MemSchemaRDefUpdate(self._cw, kobj=(subjtype, desttype), + rschema=rschema, values=newvalues) + SourceDbRDefUpdate(self._cw, kobj=(subjtype, desttype), + rschema=rschema, values=newvalues) + + +# constraints synchronization hooks ############################################ + +class AfterAddCWConstraintHook(SyncSchemaHook): + __regid__ = 'syncaddcwconstraint' + __select__ = SyncSchemaHook.__select__ & implements('CWConstraint') + events = ('after_add_entity', 'after_update_entity') + + def __call__(self): + MemSchemaCWConstraintAdd(self._cw, entity=self.entity) + SourceDbCWConstraintAdd(self._cw, entity=self.entity) + + +class AfterAddConstrainedByHook(SyncSchemaHook): + __regid__ = 'syncdelconstrainedby' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype('constrained_by') + events = ('after_add_relation',) + + def __call__(self): + if self._cw.added_in_transaction(self.eidfrom): + self._cw.transaction_data.setdefault(self.eidfrom, []).append(self.eidto) + + +class BeforeDeleteConstrainedByHook(AfterAddConstrainedByHook): + __regid__ = 'syncdelconstrainedby' + events = ('before_delete_relation',) + + def __call__(self): + if self._cw.deleted_in_transaction(self.eidfrom): + return + schema = self._cw.vreg.schema + entity = self._cw.entity_from_eid(self.eidto) + rdef = schema.schema_by_eid(self.eidfrom) + try: + cstr = rdef.constraint_by_type(entity.type) + except IndexError: + self._cw.critical('constraint type no more accessible') + else: + subjtype, rtype, objtype = rdef.as_triple() + SourceDbCWConstraintDel(self._cw, subjtype=subjtype, rtype=rtype, + objtype=objtype, cstr=cstr) + MemSchemaCWConstraintDel(self._cw, subjtype=subjtype, rtype=rtype, + objtype=objtype, cstr=cstr) + + +# permissions synchronization hooks ############################################ + +class AfterAddPermissionHook(SyncSchemaHook): + """added entity/relation *_permission, need to update schema""" + __regid__ = 'syncaddperm' + __select__ = 
SyncSchemaHook.__select__ & hook.match_rtype( + 'read_permission', 'add_permission', 'delete_permission', + 'update_permission') + events = ('after_add_relation',) + + def __call__(self): + action = self.rtype.split('_', 1)[0] + if self._cw.describe(self.eidto)[0] == 'CWGroup': + MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom, + group_eid=self.eidto) + else: # RQLExpression + expr = self._cw.entity_from_eid(self.eidto).expression + MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom, + expr=expr) + + +class BeforeDelPermissionHook(AfterAddPermissionHook): + """delete entity/relation *_permission, need to update schema + + skip the operation if the related type is being deleted + """ + __regid__ = 'syncdelperm' + events = ('before_delete_relation',) + + def __call__(self): + if self._cw.deleted_in_transaction(self.eidfrom): + return + action = self.rtype.split('_', 1)[0] + if self._cw.describe(self.eidto)[0] == 'CWGroup': + MemSchemaPermissionDel(self._cw, action=action, eid=self.eidfrom, + group_eid=self.eidto) + else: # RQLExpression + expr = self._cw.entity_from_eid(self.eidto).expression + MemSchemaPermissionDel(self._cw, action=action, eid=self.eidfrom, + expr=expr) + + +# specializes synchronization hooks ############################################ + + +class AfterAddSpecializesHook(SyncSchemaHook): + __regid__ = 'syncaddspecializes' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype('specializes') + events = ('after_add_relation',) + + def __call__(self): + MemSchemaSpecializesAdd(self._cw, etypeeid=self.eidfrom, + parentetypeeid=self.eidto) + + +class AfterDelSpecializesHook(SyncSchemaHook): + __regid__ = 'syncdelspecializes' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype('specializes') + events = ('after_delete_relation',) + + def __call__(self): + MemSchemaSpecializesDel(self._cw, etypeeid=self.eidfrom, + parentetypeeid=self.eidto) diff -r 15d541321a8c -r 74c1597f8a82 hooks/syncsession.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/syncsession.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,235 @@ +"""Core hooks: synchronize living session on persistent data changes + +:organization: Logilab +:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from cubicweb import UnknownProperty, ValidationError, BadConnectionId +from cubicweb.selectors import implements +from cubicweb.server import hook + + +def get_user_sessions(repo, ueid): + for session in repo._sessions.values(): + if ueid == session.user.eid: + yield session + + +class SyncSessionHook(hook.Hook): + __abstract__ = True + category = 'syncsession' + + +# user/groups synchronisation ################################################# + +class _GroupOperation(hook.Operation): + """base class for group operation""" + geid = None + def __init__(self, session, *args, **kwargs): + """override to get the group name before actual groups manipulation: + + we may temporarily loose right access during a commit event, so + no query should be emitted while comitting + """ + rql = 'Any N WHERE G eid %(x)s, G name N' + result = session.execute(rql, {'x': kwargs['geid']}, 'x', build_descr=False) + hook.Operation.__init__(self, session, *args, **kwargs) + self.group = result[0][0] + + +class _DeleteGroupOp(_GroupOperation): + """synchronize user when a in_group relation has been deleted""" + def commit_event(self): + """the observed connections pool has been commited""" + groups = self.cnxuser.groups + try: + groups.remove(self.group) + except KeyError: + self.error('user %s not in group %s', self.cnxuser, self.group) + return + + +class _AddGroupOp(_GroupOperation): + """synchronize user when a in_group relation has been added""" + def commit_event(self): + """the observed connections pool has been commited""" + groups = self.cnxuser.groups + if self.group in groups: + self.warning('user %s already in group %s', self.cnxuser, + self.group) + return + groups.add(self.group) + + +class SyncInGroupHook(SyncSessionHook): + __regid__ = 'syncingroup' + __select__ = SyncSessionHook.__select__ & hook.match_rtype('in_group') + events = ('after_delete_relation', 'after_add_relation') + + def __call__(self): + if self.event == 'after_delete_relation': + opcls = _DeleteGroupOp + else: + opcls = _AddGroupOp + for session in get_user_sessions(self._cw.repo, self.eidfrom): + opcls(self._cw, cnxuser=session.user, geid=self.eidto) + + +class _DelUserOp(hook.Operation): + """close associated user's session when it is deleted""" + def __init__(self, session, cnxid): + self.cnxid = cnxid + hook.Operation.__init__(self, session) + + def commit_event(self): + """the observed connections pool has been commited""" + try: + self.session.repo.close(self.cnxid) + except BadConnectionId: + pass # already closed + + +class CloseDeletedUserSessionsHook(SyncSessionHook): + __regid__ = 'closession' + __select__ = SyncSessionHook.__select__ & implements('CWUser') + events = ('after_delete_entity',) + + def __call__(self): + """modify user permission, need to update users""" + for session in get_user_sessions(self._cw.repo, self.entity.eid): + _DelUserOp(self._cw, session.id) + + +# CWProperty hooks ############################################################# + + +class _DelCWPropertyOp(hook.Operation): + """a user's custom properties has been deleted""" + + def commit_event(self): + """the observed connections pool has been commited""" + try: + del self.cwpropdict[self.key] + except KeyError: + self.error('%s has no associated value', self.key) + + +class _ChangeCWPropertyOp(hook.Operation): + """a user's custom properties has been 
added/changed""" + + def commit_event(self): + """the observed connections pool has been commited""" + self.cwpropdict[self.key] = self.value + + +class _AddCWPropertyOp(hook.Operation): + """a user's custom properties has been added/changed""" + + def commit_event(self): + """the observed connections pool has been commited""" + cwprop = self.cwprop + if not cwprop.for_user: + self.session.vreg['propertyvalues'][cwprop.pkey] = cwprop.value + # if for_user is set, update is handled by a ChangeCWPropertyOp operation + + +class AddCWPropertyHook(SyncSessionHook): + __regid__ = 'addcwprop' + __select__ = SyncSessionHook.__select__ & implements('CWProperty') + events = ('after_add_entity',) + + def __call__(self): + key, value = self.entity.pkey, self.entity.value + session = self._cw + try: + value = session.vreg.typed_value(key, value) + except UnknownProperty: + raise ValidationError(self.entity.eid, + {'pkey': session._('unknown property key')}) + except ValueError, ex: + raise ValidationError(self.entity.eid, + {'value': session._(str(ex))}) + if not session.user.matching_groups('managers'): + session.add_relation(entity.eid, 'for_user', session.user.eid) + else: + _AddCWPropertyOp(session, cwprop=self.entity) + + +class UpdateCWPropertyHook(AddCWPropertyHook): + __regid__ = 'updatecwprop' + events = ('after_update_entity',) + + def __call__(self): + entity = self.entity + if not ('pkey' in entity.edited_attributes or + 'value' in entity.edited_attributes): + return + key, value = entity.pkey, entity.value + session = self._cw + try: + value = session.vreg.typed_value(key, value) + except UnknownProperty: + return + except ValueError, ex: + raise ValidationError(entity.eid, {'value': session._(str(ex))}) + if entity.for_user: + for session_ in get_user_sessions(session.repo, entity.for_user[0].eid): + _ChangeCWPropertyOp(session, cwpropdict=session_.user.properties, + key=key, value=value) + else: + # site wide properties + _ChangeCWPropertyOp(session, cwpropdict=session.vreg['propertyvalues'], + key=key, value=value) + + +class DeleteCWPropertyHook(AddCWPropertyHook): + __regid__ = 'delcwprop' + events = ('before_delete_entity',) + + def __call__(self): + eid = self.entity.eid + session = self._cw + for eidfrom, rtype, eidto in session.transaction_data.get('pendingrelations', ()): + if rtype == 'for_user' and eidfrom == self.entity.eid: + # if for_user was set, delete has already been handled + break + else: + _DelCWPropertyOp(session, cwpropdict=session.vreg['propertyvalues'], + key=self.entity.pkey) + + +class AddForUserRelationHook(SyncSessionHook): + __regid__ = 'addcwpropforuser' + __select__ = SyncSessionHook.__select__ & hook.match_rtype('for_user') + events = ('after_add_relation',) + + def __call__(self): + session = self._cw + eidfrom = self.eidfrom + if not session.describe(eidfrom)[0] == 'CWProperty': + return + key, value = session.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V', + {'x': eidfrom}, 'x')[0] + if session.vreg.property_info(key)['sitewide']: + raise ValidationError(eidfrom, + {'for_user': session._("site-wide property can't be set for user")}) + for session_ in get_user_sessions(session.repo, self.eidto): + _ChangeCWPropertyOp(session, cwpropdict=session_.user.properties, + key=key, value=value) + + +class DelForUserRelationHook(AddForUserRelationHook): + __regid__ = 'delcwpropforuser' + events = ('after_delete_relation',) + + def __call__(self): + session = self._cw + key = session.execute('Any K WHERE P eid %(x)s, P pkey K', + {'x': self.eidfrom}, 
'x')[0][0] + session.transaction_data.setdefault('pendingrelations', []).append( + (self.eidfrom, self.rtype, self.eidto)) + for session_ in get_user_sessions(session.repo, self.eidto): + _DelCWPropertyOp(session, cwpropdict=session_.user.properties, key=key) diff -r 15d541321a8c -r 74c1597f8a82 hooks/test/data/bootstrap_cubes --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/test/data/bootstrap_cubes Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,1 @@ +email diff -r 15d541321a8c -r 74c1597f8a82 hooks/test/unittest_bookmarks.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/test/unittest_bookmarks.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,27 @@ +""" + +:organization: Logilab +:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +from logilab.common.testlib import unittest_main +from cubicweb.devtools.testlib import CubicWebTC + +class BookmarkHooksTC(CubicWebTC): + + + def test_auto_delete_bookmarks(self): + beid = self.execute('INSERT Bookmark X: X title "hop", X path "view", X bookmarked_by U ' + 'WHERE U login "admin"')[0][0] + self.execute('SET X bookmarked_by U WHERE U login "anon"') + self.commit() + self.execute('DELETE X bookmarked_by U WHERE U login "admin"') + self.commit() + self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x')) + self.execute('DELETE X bookmarked_by U WHERE U login "anon"') + self.commit() + self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x')) + +if __name__ == '__main__': + unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 hooks/test/unittest_hooks.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/test/unittest_hooks.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,507 @@ +# -*- coding: utf-8 -*- +"""functional tests for core hooks + +note: most schemahooks.py hooks are actually tested in unittest_migrations.py +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" + +from logilab.common.testlib import TestCase, unittest_main + +from datetime import datetime + +from cubicweb import (ConnectionError, ValidationError, AuthenticationError, + BadConnectionId) +from cubicweb.devtools.testlib import CubicWebTC, get_versions + +from cubicweb.server.sqlutils import SQL_PREFIX +from cubicweb.server.repository import Repository + +orig_get_versions = Repository.get_versions + +def setup_module(*args): + Repository.get_versions = get_versions + +def teardown_module(*args): + Repository.get_versions = orig_get_versions + + + +class CoreHooksTC(CubicWebTC): + + def test_delete_internal_entities(self): + self.assertRaises(ValidationError, self.execute, + 'DELETE CWEType X WHERE X name "CWEType"') + self.assertRaises(ValidationError, self.execute, + 'DELETE CWRType X WHERE X name "relation_type"') + self.assertRaises(ValidationError, self.execute, + 'DELETE CWGroup X WHERE X name "owners"') + + def test_delete_required_relations_subject(self): + self.execute('INSERT CWUser X: X login "toto", X upassword "hop", X in_group Y ' + 'WHERE Y name "users"') + self.commit() + self.execute('DELETE X in_group Y WHERE X login "toto", Y name "users"') + self.assertRaises(ValidationError, self.commit) + self.execute('DELETE X in_group Y WHERE X login "toto"') + self.execute('SET X in_group Y WHERE X login "toto", Y name "guests"') + self.commit() + + def test_delete_required_relations_object(self): + self.skip('no sample in the 
schema ! YAGNI ? Kermaat ?') + + def test_static_vocabulary_check(self): + self.assertRaises(ValidationError, + self.execute, + 'SET X composite "whatever" WHERE X from_entity FE, FE name "CWUser", X relation_type RT, RT name "in_group"') + + def test_missing_required_relations_subject_inline(self): + # missing in_group relation + self.execute('INSERT CWUser X: X login "toto", X upassword "hop"') + self.assertRaises(ValidationError, + self.commit) + + def test_inlined(self): + self.assertEquals(self.repo.schema['sender'].inlined, True) + self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') + self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') + eeid = self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart')[0][0] + self.execute('SET X sender Y WHERE X is Email, Y is EmailAddress') + rset = self.execute('Any S WHERE X sender S, X eid %s' % eeid) + self.assertEquals(len(rset), 1) + + def test_composite_1(self): + self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') + self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') + self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart') + self.failUnless(self.execute('Email X WHERE X sender Y')) + self.commit() + self.execute('DELETE Email X') + rset = self.execute('Any X WHERE X is EmailPart') + self.assertEquals(len(rset), 1) + self.commit() + rset = self.execute('Any X WHERE X is EmailPart') + self.assertEquals(len(rset), 0) + + def test_composite_2(self): + self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') + self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') + self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart') + self.commit() + self.execute('DELETE Email X') + self.execute('DELETE EmailPart X') + self.commit() + rset = self.execute('Any X WHERE X is EmailPart') + self.assertEquals(len(rset), 0) + + def test_composite_redirection(self): + self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') + self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') + self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart') + self.execute('INSERT Email X: X messageid "<2345>", X subject "test2", X sender Y, X recipients Y ' + 'WHERE Y is EmailAddress') + self.commit() + self.execute('DELETE X parts Y WHERE X messageid "<1234>"') + self.execute('SET X parts Y WHERE X messageid "<2345>"') + self.commit() + rset = self.execute('Any X WHERE X is EmailPart') + self.assertEquals(len(rset), 1) + self.assertEquals(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>') + + def test_unsatisfied_constraints(self): + releid = self.execute('INSERT CWRelation X: X from_entity FE, X relation_type RT, X to_entity TE ' + 'WHERE FE name "CWUser", RT name "in_group", TE name "String"')[0][0] + self.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"', + {'x': releid}, 'x') + ex = self.assertRaises(ValidationError, + self.commit) + 
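        # in_group's relation definitions carry an RQLConstraint requiring a
        # non-final object type, so targeting String must be rejected on commit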
self.assertEquals(ex.errors, {'to_entity': 'RQLConstraint O final FALSE failed'}) + + def test_html_tidy_hook(self): + req = self.request() + entity = req.create_entity('Workflow', name=u'wf1', description_format=u'text/html', + description=u'yo') + self.assertEquals(entity.description, u'yo') + entity = req.create_entity('Workflow', name=u'wf2', description_format=u'text/html', + description=u'yo') + self.assertEquals(entity.description, u'yo') + entity = req.create_entity('Workflow', name=u'wf3', description_format=u'text/html', + description=u'yo') + self.assertEquals(entity.description, u'yo') + entity = req.create_entity('Workflow', name=u'wf4', description_format=u'text/html', + description=u'R&D') + self.assertEquals(entity.description, u'R&D') + entity = req.create_entity('Workflow', name=u'wf5', description_format=u'text/html', + description=u"
    c'est l'été") + self.assertEquals(entity.description, u"
    c'est l'été
    ") + + def test_nonregr_html_tidy_hook_no_update(self): + entity = self.request().create_entity('Workflow', name=u'wf1', description_format=u'text/html', + description=u'yo') + entity.set_attributes(name=u'wf2') + self.assertEquals(entity.description, u'yo') + entity.set_attributes(description=u'R&D

    yo') + entity.pop('description') + self.assertEquals(entity.description, u'R&D

    yo

    ') + + + def test_metadata_cwuri(self): + entity = self.request().create_entity('Workflow', name=u'wf1') + self.assertEquals(entity.cwuri, self.repo.config['base-url'] + 'eid/%s' % entity.eid) + + def test_metadata_creation_modification_date(self): + _now = datetime.now() + entity = self.request().create_entity('Workflow', name=u'wf1') + self.assertEquals((entity.creation_date - _now).seconds, 0) + self.assertEquals((entity.modification_date - _now).seconds, 0) + + def test_metadata_created_by(self): + entity = self.request().create_entity('Bookmark', title=u'wf1', path=u'/view') + self.commit() # fire operations + self.assertEquals(len(entity.created_by), 1) # make sure we have only one creator + self.assertEquals(entity.created_by[0].eid, self.session.user.eid) + + def test_metadata_owned_by(self): + entity = self.request().create_entity('Bookmark', title=u'wf1', path=u'/view') + self.commit() # fire operations + self.assertEquals(len(entity.owned_by), 1) # make sure we have only one owner + self.assertEquals(entity.owned_by[0].eid, self.session.user.eid) + + def test_user_login_stripped(self): + u = self.create_user(' joe ') + tname = self.execute('Any L WHERE E login L, E eid %(e)s', + {'e': u.eid})[0][0] + self.assertEquals(tname, 'joe') + self.execute('SET X login " jijoe " WHERE X eid %(x)s', {'x': u.eid}) + tname = self.execute('Any L WHERE E login L, E eid %(e)s', + {'e': u.eid})[0][0] + self.assertEquals(tname, 'jijoe') + + + +class UserGroupHooksTC(CubicWebTC): + + def test_user_synchronization(self): + self.create_user('toto', password='hop', commit=False) + self.assertRaises(AuthenticationError, + self.repo.connect, u'toto', password='hop') + self.commit() + cnxid = self.repo.connect(u'toto', password='hop') + self.failIfEqual(cnxid, self.session.id) + self.execute('DELETE CWUser X WHERE X login "toto"') + self.repo.execute(cnxid, 'State X') + self.commit() + self.assertRaises(BadConnectionId, + self.repo.execute, cnxid, 'State X') + + def test_user_group_synchronization(self): + user = self.session.user + self.assertEquals(user.groups, set(('managers',))) + self.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid) + self.assertEquals(user.groups, set(('managers',))) + self.commit() + self.assertEquals(user.groups, set(('managers', 'guests'))) + self.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid) + self.assertEquals(user.groups, set(('managers', 'guests'))) + self.commit() + self.assertEquals(user.groups, set(('managers',))) + + def test_user_composite_owner(self): + ueid = self.create_user('toto').eid + # composite of euser should be owned by the euser regardless of who created it + self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", U use_email X ' + 'WHERE U login "toto"') + self.commit() + self.assertEquals(self.execute('Any A WHERE X owned_by U, U use_email X,' + 'U login "toto", X address A')[0][0], + 'toto@logilab.fr') + + def test_no_created_by_on_deleted_entity(self): + eid = self.execute('INSERT EmailAddress X: X address "toto@logilab.fr"')[0][0] + self.execute('DELETE EmailAddress X WHERE X eid %s' % eid) + self.commit() + self.failIf(self.execute('Any X WHERE X created_by Y, X eid >= %(x)s', {'x': eid})) + + +class CWPropertyHooksTC(CubicWebTC): + + def test_unexistant_eproperty(self): + ex = self.assertRaises(ValidationError, + self.execute, 'INSERT CWProperty X: X pkey "bla.bla", X value "hop", X for_user U') + self.assertEquals(ex.errors, {'pkey': 'unknown property key'}) + ex = 
self.assertRaises(ValidationError, + self.execute, 'INSERT CWProperty X: X pkey "bla.bla", X value "hop"') + self.assertEquals(ex.errors, {'pkey': 'unknown property key'}) + + def test_site_wide_eproperty(self): + ex = self.assertRaises(ValidationError, + self.execute, 'INSERT CWProperty X: X pkey "ui.site-title", X value "hop", X for_user U') + self.assertEquals(ex.errors, {'for_user': "site-wide property can't be set for user"}) + + def test_bad_type_eproperty(self): + ex = self.assertRaises(ValidationError, + self.execute, 'INSERT CWProperty X: X pkey "ui.language", X value "hop", X for_user U') + self.assertEquals(ex.errors, {'value': u'unauthorized value'}) + ex = self.assertRaises(ValidationError, + self.execute, 'INSERT CWProperty X: X pkey "ui.language", X value "hop"') + self.assertEquals(ex.errors, {'value': u'unauthorized value'}) + + +class SchemaHooksTC(CubicWebTC): + + def test_duplicate_etype_error(self): + # check we can't add a CWEType or CWRType entity if it already exists one + # with the same name + self.assertRaises(ValidationError, + self.execute, 'INSERT CWEType X: X name "CWUser"') + self.assertRaises(ValidationError, + self.execute, 'INSERT CWRType X: X name "in_group"') + + def test_validation_unique_constraint(self): + self.assertRaises(ValidationError, + self.execute, 'INSERT CWUser X: X login "admin"') + try: + self.execute('INSERT CWUser X: X login "admin"') + except ValidationError, ex: + self.assertIsInstance(ex.entity, int) + self.assertEquals(ex.errors, {'login': 'the value "admin" is already used, use another one'}) + + +class SchemaModificationHooksTC(CubicWebTC): + + @classmethod + def init_config(cls, config): + super(SchemaModificationHooksTC, cls).init_config(config) + config._cubes = None + cls.repo.fill_schema() + + def index_exists(self, etype, attr, unique=False): + self.session.set_pool() + dbhelper = self.session.pool.source('system').dbhelper + sqlcursor = self.session.pool['system'] + return dbhelper.index_exists(sqlcursor, SQL_PREFIX + etype, SQL_PREFIX + attr, unique=unique) + + def _set_perms(self, eid): + self.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', + {'x': eid}, 'x') + self.execute('SET X add_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"', + {'x': eid}, 'x') + self.execute('SET X delete_permission G WHERE X eid %(x)s, G is CWGroup, G name "owners"', + {'x': eid}, 'x') + + def test_base(self): + schema = self.repo.schema + self.session.set_pool() + dbhelper = self.session.pool.source('system').dbhelper + sqlcursor = self.session.pool['system'] + self.failIf(schema.has_entity('Societe2')) + self.failIf(schema.has_entity('concerne2')) + # schema should be update on insertion (after commit) + eeid = self.execute('INSERT CWEType X: X name "Societe2", X description "", X final FALSE')[0][0] + self._set_perms(eeid) + self.execute('INSERT CWRType X: X name "concerne2", X description "", X final FALSE, X symetric FALSE') + self.failIf(schema.has_entity('Societe2')) + self.failIf(schema.has_entity('concerne2')) + # have to commit before adding definition relations + self.commit() + self.failUnless(schema.has_entity('Societe2')) + self.failUnless(schema.has_relation('concerne2')) + attreid = self.execute('INSERT CWAttribute X: X cardinality "11", X defaultval "noname", ' + ' X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F ' + 'WHERE RT name "name", E name "Societe2", F name "String"')[0][0] + self._set_perms(attreid) + concerne2_rdef_eid = self.execute( + 'INSERT CWRelation X: X 
cardinality "**", X relation_type RT, X from_entity E, X to_entity E ' + 'WHERE RT name "concerne2", E name "Societe2"')[0][0] + self._set_perms(concerne2_rdef_eid) + self.failIf('name' in schema['Societe2'].subject_relations()) + self.failIf('concerne2' in schema['Societe2'].subject_relations()) + self.failIf(self.index_exists('Societe2', 'name')) + self.commit() + self.failUnless('name' in schema['Societe2'].subject_relations()) + self.failUnless('concerne2' in schema['Societe2'].subject_relations()) + self.failUnless(self.index_exists('Societe2', 'name')) + # now we should be able to insert and query Societe2 + s2eid = self.execute('INSERT Societe2 X: X name "logilab"')[0][0] + self.execute('Societe2 X WHERE X name "logilab"') + self.execute('SET X concerne2 X WHERE X name "logilab"') + rset = self.execute('Any X WHERE X concerne2 Y') + self.assertEquals(rset.rows, [[s2eid]]) + # check that when a relation definition is deleted, existing relations are deleted + rdefeid = self.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, ' + ' X from_entity E, X to_entity E ' + 'WHERE RT name "concerne2", E name "CWUser"')[0][0] + self._set_perms(rdefeid) + self.commit() + self.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}, 'x') + self.commit() + self.failUnless('concerne2' in schema['CWUser'].subject_relations()) + self.failIf('concerne2' in schema['Societe2'].subject_relations()) + self.failIf(self.execute('Any X WHERE X concerne2 Y')) + # schema should be cleaned on delete (after commit) + self.execute('DELETE CWEType X WHERE X name "Societe2"') + self.execute('DELETE CWRType X WHERE X name "concerne2"') + self.failUnless(self.index_exists('Societe2', 'name')) + self.failUnless(schema.has_entity('Societe2')) + self.failUnless(schema.has_relation('concerne2')) + self.commit() + self.failIf(self.index_exists('Societe2', 'name')) + self.failIf(schema.has_entity('Societe2')) + self.failIf(schema.has_entity('concerne2')) + self.failIf('concerne2' in schema['CWUser'].subject_relations()) + + def test_is_instance_of_insertions(self): + seid = self.execute('INSERT Transition T: T name "subdiv"')[0][0] + is_etypes = [etype for etype, in self.execute('Any ETN WHERE X eid %s, X is ET, ET name ETN' % seid)] + self.assertEquals(is_etypes, ['Transition']) + instanceof_etypes = [etype for etype, in self.execute('Any ETN WHERE X eid %s, X is_instance_of ET, ET name ETN' % seid)] + self.assertEquals(sorted(instanceof_etypes), ['BaseTransition', 'Transition']) + snames = [name for name, in self.execute('Any N WHERE S is BaseTransition, S name N')] + self.failIf('subdiv' in snames) + snames = [name for name, in self.execute('Any N WHERE S is_instance_of BaseTransition, S name N')] + self.failUnless('subdiv' in snames) + + + def test_perms_synchronization_1(self): + schema = self.repo.schema + self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers', 'users'))) + self.failUnless(self.execute('Any X, Y WHERE X is CWEType, X name "CWUser", Y is CWGroup, Y name "users"')[0]) + self.execute('DELETE X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"') + self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers', 'users', ))) + self.commit() + self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers', ))) + self.execute('SET X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"') + self.commit() + self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers', 'users',))) + + def 
test_perms_synchronization_2(self): + schema = self.repo.schema['in_group'].rdefs[('CWUser', 'CWGroup')] + self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests'))) + self.execute('DELETE X read_permission Y WHERE X relation_type RT, RT name "in_group", Y name "guests"') + self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests'))) + self.commit() + self.assertEquals(schema.get_groups('read'), set(('managers', 'users'))) + self.execute('SET X read_permission Y WHERE X relation_type RT, RT name "in_group", Y name "guests"') + self.assertEquals(schema.get_groups('read'), set(('managers', 'users'))) + self.commit() + self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests'))) + + def test_nonregr_user_edit_itself(self): + ueid = self.session.user.eid + groupeids = [eid for eid, in self.execute('CWGroup G WHERE G name in ("managers", "users")')] + self.execute('DELETE X in_group Y WHERE X eid %s' % ueid) + self.execute('SET X surname "toto" WHERE X eid %s' % ueid) + self.execute('SET X in_group Y WHERE X eid %s, Y name "managers"' % ueid) + self.commit() + eeid = self.execute('Any X WHERE X is CWEType, X name "CWEType"')[0][0] + self.execute('DELETE X read_permission Y WHERE X eid %s' % eeid) + self.execute('SET X final FALSE WHERE X eid %s' % eeid) + self.execute('SET X read_permission Y WHERE X eid %s, Y eid in (%s, %s)' + % (eeid, groupeids[0], groupeids[1])) + self.commit() + self.execute('Any X WHERE X is CWEType, X name "CWEType"') + + # schema modification hooks tests ######################################### + + def test_uninline_relation(self): + self.session.set_pool() + dbhelper = self.session.pool.source('system').dbhelper + sqlcursor = self.session.pool['system'] + self.failUnless(self.schema['state_of'].inlined) + try: + self.execute('SET X inlined FALSE WHERE X name "state_of"') + self.failUnless(self.schema['state_of'].inlined) + self.commit() + self.failIf(self.schema['state_of'].inlined) + self.failIf(self.index_exists('State', 'state_of')) + rset = self.execute('Any X, Y WHERE X state_of Y') + self.assertEquals(len(rset), 2) # user states + finally: + self.execute('SET X inlined TRUE WHERE X name "state_of"') + self.failIf(self.schema['state_of'].inlined) + self.commit() + self.failUnless(self.schema['state_of'].inlined) + self.failUnless(self.index_exists('State', 'state_of')) + rset = self.execute('Any X, Y WHERE X state_of Y') + self.assertEquals(len(rset), 2) + + def test_indexed_change(self): + self.session.set_pool() + dbhelper = self.session.pool.source('system').dbhelper + sqlcursor = self.session.pool['system'] + try: + self.execute('SET X indexed FALSE WHERE X relation_type R, R name "name"') + self.failUnless(self.schema['name'].rdef('Workflow', 'String').indexed) + self.failUnless(self.index_exists('Workflow', 'name')) + self.commit() + self.failIf(self.schema['name'].rdef('Workflow', 'String').indexed) + self.failIf(self.index_exists('Workflow', 'name')) + finally: + self.execute('SET X indexed TRUE WHERE X relation_type R, R name "name"') + self.failIf(self.schema['name'].rdef('Workflow', 'String').indexed) + self.failIf(self.index_exists('Workflow', 'name')) + self.commit() + self.failUnless(self.schema['name'].rdef('Workflow', 'String').indexed) + self.failUnless(self.index_exists('Workflow', 'name')) + + def test_unique_change(self): + self.session.set_pool() + dbhelper = self.session.pool.source('system').dbhelper + sqlcursor = self.session.pool['system'] + try: + self.execute('INSERT 
CWConstraint X: X cstrtype CT, DEF constrained_by X ' + 'WHERE CT name "UniqueConstraint", DEF relation_type RT, DEF from_entity E,' + 'RT name "name", E name "Workflow"') + self.failIf(self.schema['Workflow'].has_unique_values('name')) + self.failIf(self.index_exists('Workflow', 'name', unique=True)) + self.commit() + self.failUnless(self.schema['Workflow'].has_unique_values('name')) + self.failUnless(self.index_exists('Workflow', 'name', unique=True)) + finally: + self.execute('DELETE DEF constrained_by X WHERE X cstrtype CT, ' + 'CT name "UniqueConstraint", DEF relation_type RT, DEF from_entity E,' + 'RT name "name", E name "Workflow"') + self.failUnless(self.schema['Workflow'].has_unique_values('name')) + self.failUnless(self.index_exists('Workflow', 'name', unique=True)) + self.commit() + self.failIf(self.schema['Workflow'].has_unique_values('name')) + self.failIf(self.index_exists('Workflow', 'name', unique=True)) + + def test_required_change_1(self): + self.execute('SET DEF cardinality "?1" ' + 'WHERE DEF relation_type RT, DEF from_entity E,' + 'RT name "title", E name "Bookmark"') + self.commit() + # should now be able to add bookmark without title + self.execute('INSERT Bookmark X: X path "/view"') + self.commit() + + def test_required_change_2(self): + self.execute('SET DEF cardinality "11" ' + 'WHERE DEF relation_type RT, DEF from_entity E,' + 'RT name "surname", E name "CWUser"') + self.commit() + # should not be able anymore to add cwuser without surname + self.assertRaises(ValidationError, self.create_user, "toto") + self.execute('SET DEF cardinality "?1" ' + 'WHERE DEF relation_type RT, DEF from_entity E,' + 'RT name "surname", E name "CWUser"') + self.commit() + + + def test_add_attribute_to_base_class(self): + attreid = self.execute('INSERT CWAttribute X: X cardinality "11", X defaultval "noname", X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F ' + 'WHERE RT name "messageid", E name "BaseTransition", F name "String"')[0][0] + assert self.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"', + {'x': attreid}, 'x') + self.commit() + self.schema.rebuild_infered_relations() + self.failUnless('Transition' in self.schema['messageid'].subjects()) + self.failUnless('WorkflowTransition' in self.schema['messageid'].subjects()) + self.execute('Any X WHERE X is_instance_of BaseTransition, X messageid "hop"') + +if __name__ == '__main__': + unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 hooks/workflow.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/workflow.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,348 @@ +"""Core hooks: workflow related hooks + +:organization: Logilab +:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from datetime import datetime + +from cubicweb import RepositoryError, ValidationError +from cubicweb.interfaces import IWorkflowable +from cubicweb.selectors import implements +from cubicweb.server import hook +from cubicweb.entities.wfobjs import WorkflowTransition + + +def _change_state(session, x, oldstate, newstate): + nocheck = session.transaction_data.setdefault('skip-security', set()) + nocheck.add((x, 'in_state', oldstate)) + nocheck.add((x, 'in_state', newstate)) + # delete previous state first in case we're using a super session, + # unless in_state isn't stored in the system source + fromsource = session.describe(x)[1] + if fromsource == 'system' or \ + not session.repo.sources_by_uri[fromsource].support_relation('in_state'): + session.delete_relation(x, 'in_state', oldstate) + session.add_relation(x, 'in_state', newstate) + + +# operations ################################################################### + +class _SetInitialStateOp(hook.Operation): + """make initial state be a default state""" + + def precommit_event(self): + session = self.session + entity = self.entity + # if there is an initial state and the entity's state is not set, + # use the initial state as a default state + if not (session.deleted_in_transaction(entity.eid) or entity.in_state) \ + and entity.current_workflow: + state = entity.current_workflow.initial + if state: + # use super session to by-pass security checks + session.super_session.add_relation(entity.eid, 'in_state', + state.eid) + + +class _FireAutotransitionOp(hook.Operation): + """try to fire auto transition after state changes""" + + def precommit_event(self): + session = self.session + entity = self.entity + autotrs = list(entity.possible_transitions('auto')) + if autotrs: + assert len(autotrs) == 1 + entity.fire_transition(autotrs[0]) + + +class _WorkflowChangedOp(hook.Operation): + """fix entity current state when changing its workflow""" + + def precommit_event(self): + # notice that enforcement that new workflow apply to the entity's type is + # done by schema rule, no need to check it here + session = self.session + pendingeids = session.transaction_data.get('pendingeids', ()) + if self.eid in pendingeids: + return + entity = session.entity_from_eid(self.eid) + # check custom workflow has not been rechanged to another one in the same + # transaction + mainwf = entity.main_workflow + if mainwf.eid == self.wfeid: + deststate = mainwf.initial + if not deststate: + msg = session._('workflow has no initial state') + raise ValidationError(entity.eid, {'custom_workflow': msg}) + if mainwf.state_by_eid(entity.current_state.eid): + # nothing to do + return + # if there are no history, simply go to new workflow's initial state + if not entity.workflow_history: + if entity.current_state.eid != deststate.eid: + _change_state(session, entity.eid, + entity.current_state.eid, deststate.eid) + return + msg = session._('workflow changed to "%s"') + msg %= session._(mainwf.name) + session.transaction_data[(entity.eid, 'customwf')] = self.wfeid + entity.change_state(deststate, msg, u'text/plain') + + +class _CheckTrExitPoint(hook.Operation): + + def precommit_event(self): + tr = self.session.entity_from_eid(self.treid) + outputs = set() + for ep in tr.subworkflow_exit: + if ep.subwf_state.eid in outputs: + msg = self.session._("can't have multiple exits on the same 
state") + raise ValidationError(self.treid, {'subworkflow_exit': msg}) + outputs.add(ep.subwf_state.eid) + + +class _SubWorkflowExitOp(hook.Operation): + + def precommit_event(self): + session = self.session + forentity = self.forentity + trinfo = self.trinfo + # we're in a subworkflow, check if we've reached an exit point + wftr = forentity.subworkflow_input_transition() + if wftr is None: + # inconsistency detected + msg = session._("state doesn't belong to entity's current workflow") + raise ValidationError(self.trinfo.eid, {'to_state': msg}) + tostate = wftr.get_exit_point(forentity, trinfo['to_state']) + if tostate is not None: + # reached an exit point + msg = session._('exiting from subworkflow %s') + msg %= session._(forentity.current_workflow.name) + session.transaction_data[(forentity.eid, 'subwfentrytr')] = True + # XXX iirk + req = forentity._cw + forentity._cw = session.super_session + try: + trinfo = forentity.change_state(tostate, msg, u'text/plain', + tr=wftr) + finally: + forentity._cw = req + + +# hooks ######################################################################## + +class WorkflowHook(hook.Hook): + __abstract__ = True + category = 'worfklow' + + +class SetInitialStateHook(WorkflowHook): + __regid__ = 'wfsetinitial' + __select__ = WorkflowHook.__select__ & implements(IWorkflowable) + events = ('after_add_entity',) + + def __call__(self): + _SetInitialStateOp(self._cw, entity=self.entity) + + +class PrepareStateChangeHook(WorkflowHook): + """record previous state information""" + __regid__ = 'cwdelstate' + __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state') + events = ('before_delete_relation',) + + def __call__(self): + self._cw.transaction_data.setdefault('pendingrelations', []).append( + (self.eidfrom, self.rtype, self.eidto)) + + +class FireTransitionHook(WorkflowHook): + """check the transition is allowed, add missing information. Expect that: + * wf_info_for inlined relation is set + * by_transition or to_state (managers only) inlined relation is set + """ + __regid__ = 'wffiretransition' + __select__ = WorkflowHook.__select__ & implements('TrInfo') + events = ('before_add_entity',) + + def __call__(self): + session = self._cw + entity = self.entity + # first retreive entity to which the state change apply + try: + foreid = entity['wf_info_for'] + except KeyError: + msg = session._('mandatory relation') + raise ValidationError(entity.eid, {'wf_info_for': msg}) + forentity = session.entity_from_eid(foreid) + # then check it has a workflow set, unless we're in the process of changing + # entity's workflow + if session.transaction_data.get((forentity.eid, 'customwf')): + wfeid = session.transaction_data[(forentity.eid, 'customwf')] + wf = session.entity_from_eid(wfeid) + else: + wf = forentity.current_workflow + if wf is None: + msg = session._('related entity has no workflow set') + raise ValidationError(entity.eid, {None: msg}) + # then check it has a state set + fromstate = forentity.current_state + if fromstate is None: + msg = session._('related entity has no state') + raise ValidationError(entity.eid, {None: msg}) + # True if we are coming back from subworkflow + swtr = session.transaction_data.pop((forentity.eid, 'subwfentrytr'), None) + cowpowers = session.is_super_session or 'managers' in session.user.groups + # no investigate the requested state change... 
+ try: + treid = entity['by_transition'] + except KeyError: + # no transition set, check user is a manager and destination state + # is specified (and valid) + if not cowpowers: + msg = session._('mandatory relation') + raise ValidationError(entity.eid, {'by_transition': msg}) + deststateeid = entity.get('to_state') + if not deststateeid: + msg = session._('mandatory relation') + raise ValidationError(entity.eid, {'by_transition': msg}) + deststate = wf.state_by_eid(deststateeid) + if deststate is None: + msg = session._("state doesn't belong to entity's workflow") + raise ValidationError(entity.eid, {'to_state': msg}) + else: + # check transition is valid and allowed, unless we're coming back + # from subworkflow + tr = session.entity_from_eid(treid) + if swtr is None: + if tr is None: + msg = session._("transition doesn't belong to entity's workflow") + raise ValidationError(entity.eid, {'by_transition': msg}) + if not tr.has_input_state(fromstate): + msg = session._("transition %(tr)s isn't allowed from %(st)s") % { + 'tr': session._(tr.name), 'st': session._(fromstate.name)} + raise ValidationError(entity.eid, {'by_transition': msg}) + if not tr.may_be_fired(foreid): + msg = session._("transition may not be fired") + raise ValidationError(entity.eid, {'by_transition': msg}) + if entity.get('to_state'): + deststateeid = entity['to_state'] + if not cowpowers and deststateeid != tr.destination().eid: + msg = session._("transition isn't allowed") + raise ValidationError(entity.eid, {'by_transition': msg}) + if swtr is None: + deststate = session.entity_from_eid(deststateeid) + if not cowpowers and deststate is None: + msg = session._("state doesn't belong to entity's workflow") + raise ValidationError(entity.eid, {'to_state': msg}) + else: + deststateeid = tr.destination().eid + # everything is ok, add missing information on the trinfo entity + entity['from_state'] = fromstate.eid + entity['to_state'] = deststateeid + nocheck = session.transaction_data.setdefault('skip-security', set()) + nocheck.add((entity.eid, 'from_state', fromstate.eid)) + nocheck.add((entity.eid, 'to_state', deststateeid)) + _FireAutotransitionOp(session, entity=forentity) + + +class FiredTransitionHook(WorkflowHook): + """change related entity state""" + __regid__ = 'wffiretransition' + __select__ = WorkflowHook.__select__ & implements('TrInfo') + events = ('after_add_entity',) + + def __call__(self): + trinfo = self.entity + _change_state(self._cw, trinfo['wf_info_for'], + trinfo['from_state'], trinfo['to_state']) + forentity = self._cw.entity_from_eid(trinfo['wf_info_for']) + assert forentity.current_state.eid == trinfo['to_state'] + if forentity.main_workflow.eid != forentity.current_workflow.eid: + _SubWorkflowExitOp(self._cw, forentity=forentity, trinfo=trinfo) + + +class CheckInStateChangeAllowed(WorkflowHook): + """check state apply, in case of direct in_state change using unsafe_execute + """ + __regid__ = 'wfcheckinstate' + __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state') + events = ('before_add_relation',) + + def __call__(self): + session = self._cw + nocheck = session.transaction_data.get('skip-security', ()) + if (self.eidfrom, 'in_state', self.eidto) in nocheck: + # state changed through TrInfo insertion, so we already know it's ok + return + entity = session.entity_from_eid(self.eidfrom) + mainwf = entity.main_workflow + if mainwf is None: + msg = session._('entity has no workflow set') + raise ValidationError(entity.eid, {None: msg}) + for wf in mainwf.iter_workflows(): + if 
wf.state_by_eid(self.eidto): + break + else: + msg = session._("state doesn't belong to entity's workflow. You may " + "want to set a custom workflow for this entity first.") + raise ValidationError(self.eidfrom, {'in_state': msg}) + if entity.current_workflow and wf.eid != entity.current_workflow.eid: + msg = session._("state doesn't belong to entity's current workflow") + raise ValidationError(self.eidfrom, {'in_state': msg}) + + +class SetModificationDateOnStateChange(WorkflowHook): + """update entity's modification date after changing its state""" + __regid__ = 'wfsyncmdate' + __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state') + events = ('after_add_relation',) + + def __call__(self): + if self._cw.added_in_transaction(self.eidfrom): + # new entity, not needed + return + entity = self._cw.entity_from_eid(self.eidfrom) + try: + entity.set_attributes(modification_date=datetime.now(), + _cw_unsafe=True) + except RepositoryError, ex: + # usually occurs if entity is coming from a read-only source + # (eg ldap user) + self.warning('cant change modification date for %s: %s', entity, ex) + + +class CheckWorkflowTransitionExitPoint(WorkflowHook): + """check that there is no multiple exits from the same state""" + __regid__ = 'wfcheckwftrexit' + __select__ = WorkflowHook.__select__ & hook.match_rtype('subworkflow_exit') + events = ('after_add_relation',) + + def __call__(self): + _CheckTrExitPoint(self._cw, treid=self.eidfrom) + + +class SetCustomWorkflow(WorkflowHook): + __regid__ = 'wfsetcustom' + __select__ = WorkflowHook.__select__ & hook.match_rtype('custom_workflow') + events = ('after_add_relation',) + + def __call__(self): + _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=self.eidto) + + +class DelCustomWorkflow(SetCustomWorkflow): + __regid__ = 'wfdelcustom' + events = ('after_delete_relation',) + + def __call__(self): + entity = self._cw.entity_from_eid(self.eidfrom) + typewf = entity.cwetype_workflow() + if typewf is not None: + _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=typewf.eid) + diff -r 15d541321a8c -r 74c1597f8a82 i18n.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/i18n.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,99 @@ +"""Some i18n/gettext utilities. + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
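The hooks above all share the same two-step shape: a lightweight Hook that merely records work, and an Operation that does the real checking or writing at precommit time. A minimal sketch of the same pattern as a cube author would write it; the hook id and the 'concerne2' relation are illustrative:

from cubicweb.server import hook

class _RecordLinkOp(hook.Operation):
    """keyword arguments given to the constructor (eidfrom, eidto) become
    instance attributes, as for the operations above"""
    def precommit_event(self):
        links = self.session.transaction_data.setdefault('mycube-links', set())
        links.add((self.eidfrom, self.eidto))

class RecordLinkHook(hook.Hook):
    __regid__ = 'mycube.recordlink'
    __select__ = hook.Hook.__select__ & hook.match_rtype('concerne2')
    events = ('after_add_relation',)

    def __call__(self):
        _RecordLinkOp(self._cw, eidfrom=self.eidfrom, eidto=self.eidto)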
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +import re +import os +import sys +from os.path import join, basename, splitext, exists +from glob import glob + +from cubicweb.toolsutils import create_dir + +def extract_from_tal(files, output_file): + """extract i18n strings from tal and write them into the given output file + using standard python gettext marker (_) + """ + output = open(output_file, 'w') + for filepath in files: + for match in re.finditer('i18n:(content|replace)="([^"]+)"', open(filepath).read()): + print >> output, '_("%s")' % match.group(2) + output.close() + + +def add_msg(w, msgid, msgctx=None): + """write an empty pot msgid definition""" + if isinstance(msgid, unicode): + msgid = msgid.encode('utf-8') + if msgctx: + if isinstance(msgctx, unicode): + msgctx = msgctx.encode('utf-8') + w('msgctxt "%s"\n' % msgctx) + msgid = msgid.replace('"', r'\"').splitlines() + if len(msgid) > 1: + w('msgid ""\n') + for line in msgid: + w('"%s"' % line.replace('"', r'\"')) + else: + w('msgid "%s"\n' % msgid[0]) + w('msgstr ""\n\n') + + +def execute(cmd): + """display the command, execute it and raise an Exception if returned + status != 0 + """ + from subprocess import call + print cmd.replace(os.getcwd() + os.sep, '') + status = call(cmd, shell=True) + if status != 0: + raise Exception('status = %s' % status) + + +def available_catalogs(i18ndir=None): + if i18ndir is None: + wildcard = '*.po' + else: + wildcard = join(i18ndir, '*.po') + for popath in glob(wildcard): + lang = splitext(basename(popath))[0] + yield lang, popath + + +def compile_i18n_catalogs(sourcedirs, destdir, langs): + """generate .mo files for a set of languages into the `destdir` i18n directory + """ + from logilab.common.fileutils import ensure_fs_mode + print '-> compiling %s catalogs...' 
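A quick illustration of what add_msg emits for a plain and a contextualised message id; the writer is sys.stdout here, a .pot file handle in real use:

import sys
from cubicweb.i18n import add_msg

add_msg(sys.stdout.write, u'and linked')
add_msg(sys.stdout.write, u'add', msgctx='CWUser')
# output:
#   msgid "and linked"
#   msgstr ""
#
#   msgctxt "CWUser"
#   msgid "add"
#   msgstr ""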
% destdir + errors = [] + for lang in langs: + langdir = join(destdir, lang, 'LC_MESSAGES') + if not exists(langdir): + create_dir(langdir) + pofiles = [join(path, '%s.po' % lang) for path in sourcedirs] + pofiles = [pof for pof in pofiles if exists(pof)] + mergedpo = join(destdir, '%s_merged.po' % lang) + try: + # merge instance/cubes messages catalogs with the stdlib's one + execute('msgcat --use-first --sort-output --strict -o "%s" %s' + % (mergedpo, ' '.join('"%s"' % f for f in pofiles))) + # make sure the .mo file is writeable and compiles with *msgfmt* + applmo = join(destdir, lang, 'LC_MESSAGES', 'cubicweb.mo') + try: + ensure_fs_mode(applmo) + except OSError: + pass # suppose not exists + execute('msgfmt "%s" -o "%s"' % (mergedpo, applmo)) + except Exception, ex: + errors.append('while handling language %s: %s' % (lang, ex)) + try: + # clean everything + os.unlink(mergedpo) + except Exception: + continue + return errors diff -r 15d541321a8c -r 74c1597f8a82 i18n/fr.po --- a/i18n/fr.po Wed Jan 20 10:13:02 2010 +0100 +++ b/i18n/fr.po Wed Jan 20 10:13:45 2010 +0100 @@ -1169,7 +1169,7 @@ msgstr "une erreur est survenue, la requête ne peut être complétée" msgid "and linked" -msgstr "et lié" +msgstr "et liée" msgid "and/or between different values" msgstr "et/ou entre les différentes valeurs" diff -r 15d541321a8c -r 74c1597f8a82 interfaces.py --- a/interfaces.py Wed Jan 20 10:13:02 2010 +0100 +++ b/interfaces.py Wed Jan 20 10:13:45 2010 +0100 @@ -56,7 +56,7 @@ class IProgress(Interface): """something that has a cost, a state and a progression - Take a look at cubicweb.common.mixins.ProgressMixIn for some + Take a look at cubicweb.mixins.ProgressMixIn for some default implementations """ diff -r 15d541321a8c -r 74c1597f8a82 mail.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mail.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,272 @@ +"""Common utilies to format / semd emails. + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from base64 import b64encode, b64decode +from itertools import repeat +from time import time +from email.MIMEMultipart import MIMEMultipart +from email.MIMEText import MIMEText +from email.MIMEImage import MIMEImage +from email.Header import Header +try: + from socket import gethostname +except ImportError: + def gethostname(): # gae + return 'XXX' + +from cubicweb.view import EntityView +from cubicweb.entity import Entity + +def header(ustring): + return Header(ustring.encode('UTF-8'), 'UTF-8') + +def addrheader(uaddr, uname=None): + # even if an email address should be ascii, encode it using utf8 since + # automatic tests may generate non ascii email address + addr = uaddr.encode('UTF-8') + if uname: + return '%s <%s>' % (header(uname).encode(), addr) + return addr + + +def construct_message_id(appid, eid, withtimestamp=True): + if withtimestamp: + addrpart = 'eid=%s×tamp=%.10f' % (eid, time()) + else: + addrpart = 'eid=%s' % eid + # we don't want any equal sign nor trailing newlines + leftpart = b64encode(addrpart, '.-').rstrip().rstrip('=') + return '<%s@%s.%s>' % (leftpart, appid, gethostname()) + + +def parse_message_id(msgid, appid): + if msgid[0] == '<': + msgid = msgid[1:] + if msgid[-1] == '>': + msgid = msgid[:-1] + try: + values, qualif = msgid.split('@') + padding = len(values) % 4 + values = b64decode(str(values + '='*padding), '.-') + values = dict(v.split('=') for v in values.split('&')) + fromappid, host = qualif.split('.', 1) + except: + return None + if appid != fromappid or host != gethostname(): + return None + return values + + +def format_mail(uinfo, to_addrs, content, subject="", + cc_addrs=(), msgid=None, references=(), config=None): + """Sends an Email to 'e_addr' with content 'content', and subject 'subject' + + to_addrs and cc_addrs are expected to be a list of email address without + name + """ + assert type(content) is unicode, repr(content) + msg = MIMEText(content.encode('UTF-8'), 'plain', 'UTF-8') + # safety: keep only the first newline + subject = subject.splitlines()[0] + msg['Subject'] = header(subject) + if uinfo.get('email'): + email = uinfo['email'] + elif config and config['sender-addr']: + email = unicode(config['sender-addr']) + else: + email = u'' + if uinfo.get('name'): + name = uinfo['name'] + elif config and config['sender-addr']: + name = unicode(config['sender-name']) + else: + name = u'' + msg['From'] = addrheader(email, name) + if config and config['sender-addr'] and config['sender-addr'] != email: + appaddr = addrheader(config['sender-addr'], config['sender-name']) + msg['Reply-to'] = '%s, %s' % (msg['From'], appaddr) + elif email: + msg['Reply-to'] = msg['From'] + if config is not None: + msg['X-CW'] = config.appid + unique_addrs = lambda addrs: sorted(set(addr for addr in addrs if addr is not None)) + msg['To'] = ', '.join(addrheader(addr) for addr in unique_addrs(to_addrs)) + if cc_addrs: + msg['Cc'] = ', '.join(addrheader(addr) for addr in unique_addrs(cc_addrs)) + if msgid: + msg['Message-id'] = msgid + if references: + msg['References'] = ', '.join(references) + return msg + + +class HtmlEmail(MIMEMultipart): + + def __init__(self, subject, textcontent, htmlcontent, + sendermail=None, sendername=None, recipients=None, ccrecipients=None): + MIMEMultipart.__init__(self, 'related') + self['Subject'] = header(subject) + self.preamble = 'This is a 
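construct_message_id and parse_message_id above form the round trip used to thread notification emails; a short sketch, where the instance id and eid are arbitrary examples:

from cubicweb.mail import construct_message_id, parse_message_id

msgid = construct_message_id('myinstance', 1234, withtimestamp=True)
# parse_message_id returns the encoded values when the id comes from the
# same instance on the same host, None otherwise
print parse_message_id(msgid, 'myinstance')    # {'eid': '1234', 'timestamp': '...'}
print parse_message_id(msgid, 'otherinstance')  # None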
multi-part message in MIME format.' + # Attach alternative text message + alternative = MIMEMultipart('alternative') + self.attach(alternative) + msgtext = MIMEText(textcontent.encode('UTF-8'), 'plain', 'UTF-8') + alternative.attach(msgtext) + # Attach html message + msghtml = MIMEText(htmlcontent.encode('UTF-8'), 'html', 'UTF-8') + alternative.attach(msghtml) + if sendermail or sendername: + self['From'] = addrheader(sendermail, sendername) + if recipients: + self['To'] = ', '.join(addrheader(addr) for addr in recipients if addr is not None) + if ccrecipients: + self['Cc'] = ', '.join(addrheader(addr) for addr in ccrecipients if addr is not None) + + def attach_image(self, data, htmlId): + image = MIMEImage(data) + image.add_header('Content-ID', '<%s>' % htmlId) + self.attach(image) + + +class NotificationView(EntityView): + """abstract view implementing the "email" API (eg to simplify sending + notification) + """ + # XXX refactor this class to work with len(rset) > 1 + + msgid_timestamp = True + + # this is usually the method to call + def render_and_send(self, **kwargs): + """generate and send an email message for this view""" + delayed = kwargs.pop('delay_to_commit', None) + for recipients, msg in self.render_emails(**kwargs): + if delayed is None: + self.send(recipients, msg) + elif delayed: + self.send_on_commit(recipients, msg) + else: + self.send_now(recipients, msg) + + def cell_call(self, row, col=0, **kwargs): + self.w(self._cw._(self.content) % self.context(**kwargs)) + + def render_emails(self, **kwargs): + """generate and send emails for this view (one per recipient)""" + self._kwargs = kwargs + recipients = self.recipients() + if not recipients: + self.info('skipping %s notification, no recipients', self.__regid__) + return + if self.cw_rset is not None: + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + # if the view is using timestamp in message ids, no way to reference + # previous email + if not self.msgid_timestamp: + refs = [self.construct_message_id(eid) + for eid in entity.notification_references(self)] + else: + refs = () + msgid = self.construct_message_id(entity.eid) + else: + refs = () + msgid = None + req = self._cw + self.user_data = req.user_data() + origlang = req.lang + for something in recipients: + if isinstance(something, Entity): + # hi-jack self._cw to get a session for the returned user + self._cw = self._cw.hijack_user(something) + emailaddr = something.get_email() + else: + emailaddr, lang = something + self._cw.set_language(lang) + # since the same view (eg self) may be called multiple time and we + # need a fresh stream at each iteration, reset it explicitly + self.w = None + # XXX call render before subject to set .row/.col attributes on the + # view + try: + content = self.render(row=0, col=0, **kwargs) + subject = self.subject() + except SkipEmail: + continue + except Exception, ex: + # shouldn't make the whole transaction fail because of rendering + # error (unauthorized or such) + self.exception(str(ex)) + continue + msg = format_mail(self.user_data, [emailaddr], content, subject, + config=self._cw.vreg.config, msgid=msgid, references=refs) + yield [emailaddr], msg + # restore language + req.set_language(origlang) + + # recipients / email sending ############################################### + + def recipients(self): + """return a list of either 2-uple (email, language) or user entity to + who this email should be sent + """ + # use super_session when available, we don't want to consider security + # when selecting 
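format_mail can also be used directly, outside of the notification machinery; a minimal sketch with placeholder addresses and subject:

from cubicweb.mail import format_mail

msg = format_mail({'name': u'CubicWeb', 'email': u'noreply@example.org'},
                  ['admin@example.org'], u'the instance has been upgraded',
                  subject=u'upgrade report')
print msg['Subject'], msg['From'], msg['To']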
recipients_finder + try: + req = self._cw.super_session + except AttributeError: + req = self._cw + finder = self._cw.vreg['components'].select('recipients_finder', req, + rset=self.cw_rset, + row=self.cw_row or 0, + col=self.cw_col or 0) + return finder.recipients() + + def send_now(self, recipients, msg): + self._cw.vreg.config.sendmails([(msg, recipients)]) + + def send_on_commit(self, recipients, msg): + raise NotImplementedError + + send = send_now + + # email generation helpers ################################################# + + def construct_message_id(self, eid): + return construct_message_id(self._cw.vreg.config.appid, eid, self.msgid_timestamp) + + def format_field(self, attr, value): + return ':%(attr)s: %(value)s' % {'attr': attr, 'value': value} + + def format_section(self, attr, value): + return '%(attr)s\n%(ul)s\n%(value)s\n' % { + 'attr': attr, 'ul': '-'*len(attr), 'value': value} + + def subject(self): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + subject = self._cw._(self.message) + etype = entity.dc_type() + eid = entity.eid + login = self.user_data['login'] + return self._cw._('%(subject)s %(etype)s #%(eid)s (%(login)s)') % locals() + + def context(self, **kwargs): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + for key, val in kwargs.iteritems(): + if val and isinstance(val, unicode) and val.strip(): + kwargs[key] = self._cw._(val) + kwargs.update({'user': self.user_data['login'], + 'eid': entity.eid, + 'etype': entity.dc_type(), + 'url': entity.absolute_url(), + 'title': entity.dc_long_title(),}) + return kwargs + + +class SkipEmail(Exception): + """raise this if you decide to skip an email during its generation""" diff -r 15d541321a8c -r 74c1597f8a82 migration.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/migration.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,373 @@ +"""utilities for instances migration + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
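A concrete notification view built on the NotificationView class above usually only provides a message, a content template and a recipients list. A sketch for a hypothetical Ticket entity type (entity type, view id and address are illustrative):

from cubicweb.mail import NotificationView
from cubicweb.selectors import implements

class TicketClosedNotification(NotificationView):
    __regid__ = 'notif_ticket_closed'
    __select__ = implements('Ticket')
    message = u'ticket closed'
    content = u'%(title)s has been closed by %(user)s: %(url)s'

    def recipients(self):
        # (email, language) pairs; a real view would query the database
        return [(u'admin@example.org', 'en')]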
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +import sys +import os +import logging +import tempfile +from os.path import exists, join, basename, splitext + +from logilab.common.decorators import cached +from logilab.common.configuration import REQUIRED, read_old_config +from logilab.common.shellutils import ASK + +from cubicweb import ConfigurationError + + +def filter_scripts(config, directory, fromversion, toversion, quiet=True): + """return a list of paths of migration files to consider to upgrade + from a version to a greater one + """ + from logilab.common.changelog import Version # doesn't work with appengine + assert fromversion + assert toversion + assert isinstance(fromversion, tuple), fromversion.__class__ + assert isinstance(toversion, tuple), toversion.__class__ + assert fromversion <= toversion, (fromversion, toversion) + if not exists(directory): + if not quiet: + print directory, "doesn't exists, no migration path" + return [] + if fromversion == toversion: + return [] + result = [] + for fname in os.listdir(directory): + if fname.endswith('.pyc') or fname.endswith('.pyo') \ + or fname.endswith('~'): + continue + fpath = join(directory, fname) + try: + tver, mode = fname.split('_', 1) + except ValueError: + continue + mode = mode.split('.', 1)[0] + if not config.accept_mode(mode): + continue + try: + tver = Version(tver) + except ValueError: + continue + if tver <= fromversion: + continue + if tver > toversion: + continue + result.append((tver, fpath)) + # be sure scripts are executed in order + return sorted(result) + + +IGNORED_EXTENSIONS = ('.swp', '~') + + +def execscript_confirm(scriptpath): + """asks for confirmation before executing a script and provides the + ability to show the script's content + """ + while True: + answer = ASK.ask('Execute %r ?' 
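filter_scripts only keeps X.Y.Z_mode.py files whose version falls in the (fromversion, toversion] interval and returns them sorted by version. A sketch with a stub configuration object; the migration directory path is a placeholder:

from cubicweb.migration import filter_scripts

class AnyModeConfig(object):
    """stub: accept every script mode (Any, common, repository, ...)"""
    def accept_mode(self, mode):
        return True

for version, path in filter_scripts(AnyModeConfig(),
                                    '/path/to/mycube/migration',
                                    (3, 4, 0), (3, 6, 0)):
    print version, path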
% scriptpath, ('Y','n','show'), 'Y') + if answer == 'n': + return False + elif answer == 'show': + stream = open(scriptpath) + scriptcontent = stream.read() + stream.close() + print + print scriptcontent + print + else: + return True + +def yes(*args, **kwargs): + return True + + +class MigrationHelper(object): + """class holding CubicWeb Migration Actions used by migration scripts""" + + def __init__(self, config, interactive=True, verbosity=1): + self.config = config + if config: + # no config on shell to a remote instance + self.config.init_log(logthreshold=logging.ERROR, debug=True) + # 0: no confirmation, 1: only main commands confirmed, 2 ask for everything + self.verbosity = verbosity + self.need_wrap = True + if not interactive or not verbosity: + self.confirm = yes + self.execscript_confirm = yes + else: + self.execscript_confirm = execscript_confirm + self._option_changes = [] + self.__context = {'confirm': self.confirm, + 'config': self.config, + 'interactive_mode': interactive, + } + + def __getattribute__(self, name): + try: + return object.__getattribute__(self, name) + except AttributeError: + cmd = 'cmd_%s' % name + if hasattr(self, cmd): + meth = getattr(self, cmd) + return lambda *args, **kwargs: self.interact(args, kwargs, + meth=meth) + raise + raise AttributeError(name) + + def repo_connect(self): + return self.config.repository() + + def migrate(self, vcconf, toupgrade, options): + """upgrade the given set of cubes + + `cubes` is an ordered list of 3-uple: + (cube, fromversion, toversion) + """ + if options.fs_only: + # monkey path configuration.accept_mode so database mode (e.g. Any) + # won't be accepted + orig_accept_mode = self.config.accept_mode + def accept_mode(mode): + if mode == 'Any': + return False + return orig_accept_mode(mode) + self.config.accept_mode = accept_mode + # may be an iterator + toupgrade = tuple(toupgrade) + vmap = dict( (cube, (fromver, tover)) for cube, fromver, tover in toupgrade) + ctx = self.__context + ctx['versions_map'] = vmap + if self.config.accept_mode('Any') and 'cubicweb' in vmap: + migrdir = self.config.migration_scripts_dir() + self.cmd_process_script(join(migrdir, 'bootstrapmigration_repository.py')) + for cube, fromversion, toversion in toupgrade: + if cube == 'cubicweb': + migrdir = self.config.migration_scripts_dir() + else: + migrdir = self.config.cube_migration_scripts_dir(cube) + scripts = filter_scripts(self.config, migrdir, fromversion, toversion) + if scripts: + prevversion = None + for version, script in scripts: + # take care to X.Y.Z_Any.py / X.Y.Z_common.py: we've to call + # cube_upgraded once all script of X.Y.Z have been executed + if prevversion is not None and version != prevversion: + self.cube_upgraded(cube, prevversion) + prevversion = version + self.cmd_process_script(script) + self.cube_upgraded(cube, toversion) + else: + self.cube_upgraded(cube, toversion) + + def cube_upgraded(self, cube, version): + pass + + def shutdown(self): + pass + + def interact(self, args, kwargs, meth): + """execute the given method according to user's confirmation""" + msg = 'Execute command: %s(%s) ?' 
% ( + meth.__name__[4:], + ', '.join([repr(arg) for arg in args] + + ['%s=%r' % (n,v) for n,v in kwargs.items()])) + if 'ask_confirm' in kwargs: + ask_confirm = kwargs.pop('ask_confirm') + else: + ask_confirm = True + if not ask_confirm or self.confirm(msg): + return meth(*args, **kwargs) + + def confirm(self, question, shell=True, abort=True, retry=False, default='y'): + """ask for confirmation and return true on positive answer + + if `retry` is true the r[etry] answer may return 2 + """ + possibleanswers = ['y','n'] + if abort: + possibleanswers.append('abort') + if shell: + possibleanswers.append('shell') + if retry: + possibleanswers.append('retry') + try: + answer = ASK.ask(question, possibleanswers, default) + except (EOFError, KeyboardInterrupt): + answer = 'abort' + if answer == 'n': + return False + if answer == 'retry': + return 2 + if answer == 'abort': + raise SystemExit(1) + if shell and answer == 'shell': + self.interactive_shell() + return self.confirm(question) + return True + + def interactive_shell(self): + self.confirm = yes + self.need_wrap = False + # avoid '_' to be added to builtins by sys.display_hook + def do_not_add___to_builtins(obj): + if obj is not None: + print repr(obj) + sys.displayhook = do_not_add___to_builtins + local_ctx = self._create_context() + try: + import readline + from rlcompleter import Completer + except ImportError: + # readline not available + pass + else: + readline.set_completer(Completer(local_ctx).complete) + readline.parse_and_bind('tab: complete') + home_key = 'HOME' + if sys.platform == 'win32': + home_key = 'USERPROFILE' + histfile = os.path.join(os.environ[home_key], ".eshellhist") + try: + readline.read_history_file(histfile) + except IOError: + pass + from code import interact + banner = """entering the migration python shell +just type migration commands or arbitrary python code and type ENTER to execute it +type "exit" or Ctrl-D to quit the shell and resume operation""" + # give custom readfunc to avoid http://bugs.python.org/issue1288615 + def unicode_raw_input(prompt): + return unicode(raw_input(prompt), sys.stdin.encoding) + interact(banner, readfunc=unicode_raw_input, local=local_ctx) + readline.write_history_file(histfile) + # delete instance's confirm attribute to avoid questions + del self.confirm + self.need_wrap = True + + @cached + def _create_context(self): + """return a dictionary to use as migration script execution context""" + context = self.__context + for attr in dir(self): + if attr.startswith('cmd_'): + if self.need_wrap: + context[attr[4:]] = getattr(self, attr[4:]) + else: + context[attr[4:]] = getattr(self, attr) + return context + + def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs): + """execute a migration script + in interactive mode, display the migration script path, ask for + confirmation and execute it if confirmed + """ + migrscript = os.path.normpath(migrscript) + if migrscript.endswith('.py'): + script_mode = 'python' + elif migrscript.endswith('.txt') or migrscript.endswith('.rst'): + script_mode = 'doctest' + else: + raise Exception('This is not a valid cubicweb shell input') + if not self.execscript_confirm(migrscript): + return + scriptlocals = self._create_context().copy() + if script_mode == 'python': + if funcname is None: + pyname = '__main__' + else: + pyname = splitext(basename(migrscript))[0] + scriptlocals.update({'__file__': migrscript, '__name__': pyname}) + execfile(migrscript, scriptlocals) + if funcname is not None: + try: + func = scriptlocals[funcname] + 
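cmd_process_script executes such files with the shell context as globals, so a versioned migration script simply calls the wrapped cmd_* methods by their short names. A minimal sketch of such a script; the cube and relation names are examples taken from elsewhere in this changeset:

# content of e.g. migration/3.6.0_Any.py in a cube -- add_cube,
# add_relation_type, rql and commit come from the migration shell
# context built by _create_context()
add_cube('card', update_database=False)
add_relation_type('prefered_form')
rql('SET X prefered_form Y WHERE Y canonical TRUE, X identical_to Y',
    ask_confirm=False)
commit()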
self.info('found %s in locals', funcname) + assert callable(func), '%s (%s) is not callable' % (func, funcname) + except KeyError: + self.critical('no %s in script %s', funcname, migrscript) + return None + return func(*args, **kwargs) + else: # script_mode == 'doctest' + import doctest + doctest.testfile(migrscript, module_relative=False, + optionflags=doctest.ELLIPSIS, globs=scriptlocals) + + def cmd_option_renamed(self, oldname, newname): + """a configuration option has been renamed""" + self._option_changes.append(('renamed', oldname, newname)) + + def cmd_option_group_change(self, option, oldgroup, newgroup): + """a configuration option has been moved in another group""" + self._option_changes.append(('moved', option, oldgroup, newgroup)) + + def cmd_option_added(self, optname): + """a configuration option has been added""" + self._option_changes.append(('added', optname)) + + def cmd_option_removed(self, optname): + """a configuration option has been removed""" + # can safely be ignored + #self._option_changes.append(('removed', optname)) + + def cmd_option_type_changed(self, optname, oldtype, newvalue): + """a configuration option's type has changed""" + self._option_changes.append(('typechanged', optname, oldtype, newvalue)) + + def cmd_add_cubes(self, cubes): + """modify the list of used cubes in the in-memory config + returns newly inserted cubes, including dependencies + """ + if isinstance(cubes, basestring): + cubes = (cubes,) + origcubes = self.config.cubes() + newcubes = [p for p in self.config.expand_cubes(cubes) + if not p in origcubes] + if newcubes: + for cube in cubes: + assert cube in newcubes + self.config.add_cubes(newcubes) + return newcubes + + def cmd_remove_cube(self, cube, removedeps=False): + if removedeps: + toremove = self.config.expand_cubes([cube]) + else: + toremove = (cube,) + origcubes = self.config._cubes + basecubes = [c for c in origcubes if not c in toremove] + self.config._cubes = tuple(self.config.expand_cubes(basecubes)) + removed = [p for p in origcubes if not p in self.config._cubes] + if not cube in removed: + raise ConfigurationError("can't remove cube %s, " + "used as a dependency" % cube) + return removed + + def rewrite_configuration(self): + # import locally, show_diffs unavailable in gae environment + from cubicweb.toolsutils import show_diffs + configfile = self.config.main_config_file() + if self._option_changes: + read_old_config(self.config, self._option_changes, configfile) + fd, newconfig = tempfile.mkstemp() + for optdescr in self._option_changes: + if optdescr[0] == 'added': + optdict = self.config.get_option_def(optdescr[1]) + if optdict.get('default') is REQUIRED: + self.config.input_option(optdescr[1], optdict) + self.config.generate_config(open(newconfig, 'w')) + show_diffs(configfile, newconfig) + os.close(fd) + if exists(newconfig): + os.unlink(newconfig) + + +from logging import getLogger +from cubicweb import set_log_methods +set_log_methods(MigrationHelper, getLogger('cubicweb.migration')) diff -r 15d541321a8c -r 74c1597f8a82 misc/migration/3.5.0_Any.py --- a/misc/migration/3.5.0_Any.py Wed Jan 20 10:13:02 2010 +0100 +++ b/misc/migration/3.5.0_Any.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,7 +1,7 @@ add_relation_type('prefered_form') rql('SET X prefered_form Y WHERE Y canonical TRUE, X identical_to Y') -checkpoint() +commit() drop_attribute('EmailAddress', 'canonical') drop_relation_definition('EmailAddress', 'identical_to', 'EmailAddress') diff -r 15d541321a8c -r 74c1597f8a82 misc/migration/bootstrapmigration_repository.py 
--- a/misc/migration/bootstrapmigration_repository.py Wed Jan 20 10:13:02 2010 +0100 +++ b/misc/migration/bootstrapmigration_repository.py Wed Jan 20 10:13:45 2010 +0100 @@ -10,12 +10,49 @@ applcubicwebversion, cubicwebversion = versions_map['cubicweb'] +if applcubicwebversion < (3, 6, 0) and cubicwebversion >= (3, 6, 0): + from cubicweb.server import schemaserial as ss + session.set_pool() + session.execute = session.unsafe_execute + permsdict = ss.deserialize_ertype_permissions(session) + def _add_relation_definition_no_perms(subjtype, rtype, objtype): + rschema = fsschema.rschema(rtype) + for query, args in ss.rdef2rql(rschema, subjtype, objtype, groupmap=None): + rql(query, args, ask_confirm=False) + commit(ask_confirm=False) + + config.disabled_hooks_categories.add('integrity') + for rschema in repo.schema.relations(): + rpermsdict = permsdict.get(rschema.eid, {}) + for rdef in rschema.rdefs.values(): + for action in ('read', 'add', 'delete'): + actperms = [] + for something in rpermsdict.get(action, ()): + if isinstance(something, tuple): + actperms.append(rdef.rql_expression(*something)) + else: # group name + actperms.append(something) + rdef.set_action_permissions(action, actperms) + for action in ('read', 'add', 'delete'): + _add_relation_definition_no_perms('CWRelation', '%s_permission' % action, 'CWGroup') + _add_relation_definition_no_perms('CWRelation', '%s_permission' % action, 'RQLExpression') + _add_relation_definition_no_perms('CWAttribute', '%s_permission' % action, 'CWGroup') + _add_relation_definition_no_perms('CWAttribute', '%s_permission' % action, 'RQLExpression') + for action in ('read', 'add', 'delete'): + rql('SET X %s_permission Y WHERE X is IN (CWAttribute, CWRelation), ' + 'RT %s_permission Y, X relation_type RT, Y is CWGroup' % (action, action)) + rql('INSERT RQLExpression Y: Y exprtype YET, Y mainvars YMV, Y expression YEX, ' + 'X %s_permission Y WHERE X is IN (CWAttribute, CWRelation), ' + 'X relation_type RT, RT %s_permission Y2, Y2 exprtype YET, ' + 'Y2 mainvars YMV, Y2 expression YEX' % (action, action)) + drop_relation_definition('CWRType', '%s_permission' % action, 'CWGroup', commit=False) + drop_relation_definition('CWRType', '%s_permission' % action, 'RQLExpression') + config.disabled_hooks_categories.add('integrity') + if applcubicwebversion < (3, 4, 0) and cubicwebversion >= (3, 4, 0): - from cubicweb import RepositoryError - from cubicweb.server.hooks import uniquecstrcheck_before_modification + session.set_shared_data('do-not-insert-cwuri', True) - repo.hm.unregister_hook(uniquecstrcheck_before_modification, 'before_add_entity', '') - repo.hm.unregister_hook(uniquecstrcheck_before_modification, 'before_update_entity', '') + deactivate_verification_hooks() add_relation_type('cwuri') base_url = session.base_url() # use an internal session since some entity might forbid modifications to admin @@ -26,8 +63,7 @@ isession.execute('SET X cwuri %(u)s WHERE X eid %(x)s', {'x': eid, 'u': base_url + u'eid/%s' % eid}) isession.commit() - repo.hm.register_hook(uniquecstrcheck_before_modification, 'before_add_entity', '') - repo.hm.register_hook(uniquecstrcheck_before_modification, 'before_update_entity', '') + reactivate_verification_hooks() session.set_shared_data('do-not-insert-cwuri', False) if applcubicwebversion < (3, 5, 0) and cubicwebversion >= (3, 5, 0): @@ -49,14 +85,22 @@ # drop explicit 'State allowed_transition Transition' since it should be # infered due to yams inheritance. 
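The permission loop above relies on the per-relation-definition permission API introduced in 3.6; a small sketch of the same calls used directly (relation and group names are examples):

def restrict_in_group_add(schema):
    # rdefs maps (subject, object) pairs to relation definition objects,
    # each carrying its own permissions
    rdef = schema['in_group'].rdefs[('CWUser', 'CWGroup')]
    rdef.set_action_permissions('add', ('managers',))
    return rdef.get_groups('add')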
However we've to disable the schema # sync hook first to avoid to destroy existing data... - from cubicweb.server.schemahooks import after_del_relation_type - repo.hm.unregister_hook(after_del_relation_type, - 'after_delete_relation', 'relation_type') try: - drop_relation_definition('State', 'allowed_transition', 'Transition') - finally: - repo.hm.register_hook(after_del_relation_type, - 'after_delete_relation', 'relation_type') + from cubicweb.hooks import syncschema + repo.vreg.unregister(syncschema.AfterDelRelationTypeHook) + try: + drop_relation_definition('State', 'allowed_transition', 'Transition') + finally: + repo.vreg.register(syncschema.AfterDelRelationTypeHook) + except ImportError: # syncschema is in CW >= 3.6 only + from cubicweb.server.schemahooks import after_del_relation_type + repo.hm.unregister_hook(after_del_relation_type, + 'after_delete_relation', 'relation_type') + try: + drop_relation_definition('State', 'allowed_transition', 'Transition') + finally: + repo.hm.register_hook(after_del_relation_type, + 'after_delete_relation', 'relation_type') schema.rebuild_infered_relations() # need to be explicitly called once everything is in place for et in rql('DISTINCT Any ET,ETN WHERE S state_of ET, ET name ETN', @@ -74,7 +118,7 @@ rql('DELETE TrInfo TI WHERE NOT TI from_state S') rql('SET TI by_transition T WHERE TI from_state FS, TI to_state TS, ' 'FS allowed_transition T, T destination_state TS') - checkpoint() + commit() drop_relation_definition('State', 'state_of', 'CWEType') drop_relation_definition('Transition', 'transition_of', 'CWEType') @@ -89,7 +133,7 @@ % table, ask_confirm=False): sql('UPDATE %s SET extid=%%(extid)s WHERE eid=%%(eid)s' % table, {'extid': b64encode(extid), 'eid': eid}, ask_confirm=False) - checkpoint() + commit() if applcubicwebversion < (3, 2, 0) and cubicwebversion >= (3, 2, 0): add_cube('card', update_database=False) diff -r 15d541321a8c -r 74c1597f8a82 mixins.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mixins.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,315 @@ +"""mixins of entity/views organized somewhat in a graph or tree structure + + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from logilab.common.deprecation import deprecated +from logilab.common.decorators import cached + +from cubicweb import typed_eid +from cubicweb.selectors import implements +from cubicweb.interfaces import IEmailable, ITree + + +class TreeMixIn(object): + """base tree-mixin providing the tree interface + + This mixin has to be inherited explicitly and configured using the + tree_attribute, parent_target and children_target class attribute to + benefit from this default implementation + """ + tree_attribute = None + # XXX misnamed + parent_target = 'subject' + children_target = 'object' + + def different_type_children(self, entities=True): + """return children entities of different type as this entity. 
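To use TreeMixIn, a cube inherits from it explicitly and names the structural relation, as the class docstring above says. A sketch for a hypothetical Folder entity type filed under its parent through a filed_under relation:

from cubicweb.entities import AnyEntity
from cubicweb.mixins import TreeMixIn

class Folder(TreeMixIn, AnyEntity):
    __regid__ = 'Folder'
    tree_attribute = 'filed_under'  # child Folder -- filed_under --> parent Folder

# instances then provide parent(), children(), path(), iterparents(), prefixiter()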
+ + according to the `entities` parameter, return entity objects or the + equivalent result set + """ + res = self.related(self.tree_attribute, self.children_target, + entities=entities) + if entities: + return [e for e in res if e.e_schema != self.e_schema] + return res.filtered_rset(lambda x: x.e_schema != self.e_schema, self.cw_col) + + def same_type_children(self, entities=True): + """return children entities of the same type as this entity. + + according to the `entities` parameter, return entity objects or the + equivalent result set + """ + res = self.related(self.tree_attribute, self.children_target, + entities=entities) + if entities: + return [e for e in res if e.e_schema == self.e_schema] + return res.filtered_rset(lambda x: x.e_schema == self.e_schema, self.cw_col) + + def iterchildren(self, _done=None): + if _done is None: + _done = set() + for child in self.children(): + if child.eid in _done: + self.error('loop in %s tree', self.__regid__.lower()) + continue + yield child + _done.add(child.eid) + + def prefixiter(self, _done=None): + if _done is None: + _done = set() + if self.eid in _done: + return + yield self + _done.add(self.eid) + for child in self.iterchildren(_done): + try: + for entity in child.prefixiter(_done): + yield entity + except AttributeError: + pass + + @cached + def path(self): + """returns the list of eids from the root object to this object""" + path = [] + parent = self + while parent: + if parent.eid in path: + self.error('loop in %s tree', self.__regid__.lower()) + break + path.append(parent.eid) + try: + # check we are not leaving the tree + if (parent.tree_attribute != self.tree_attribute or + parent.parent_target != self.parent_target): + break + parent = parent.parent() + except AttributeError: + break + + path.reverse() + return path + + def iterparents(self): + def _uptoroot(self): + curr = self + while True: + curr = curr.parent() + if curr is None: + break + yield curr + return _uptoroot(self) + + def notification_references(self, view): + """used to control References field of email send on notification + for this entity. `view` is the notification view. + + Should return a list of eids which can be used to generate message ids + of previously sent email + """ + return self.path()[:-1] + + + ## ITree interface ######################################################## + def parent(self): + """return the parent entity if any, else None (e.g. 
if we are on the + root + """ + try: + return self.related(self.tree_attribute, self.parent_target, + entities=True)[0] + except (KeyError, IndexError): + return None + + def children(self, entities=True, sametype=False): + """return children entities + + according to the `entities` parameter, return entity objects or the + equivalent result set + """ + if sametype: + return self.same_type_children(entities) + else: + return self.related(self.tree_attribute, self.children_target, + entities=entities) + + def children_rql(self): + return self.related_rql(self.tree_attribute, self.children_target) + + def is_leaf(self): + return len(self.children()) == 0 + + def is_root(self): + return self.parent() is None + + def root(self): + """return the root object""" + return self._cw.entity_from_eid(self.path()[0]) + + +class EmailableMixIn(object): + """base mixin providing the default get_email() method used by + the massmailing view + + NOTE: The default implementation is based on the + primary_email / use_email scheme + """ + __implements__ = (IEmailable,) + + def get_email(self): + if getattr(self, 'primary_email', None): + return self.primary_email[0].address + if getattr(self, 'use_email', None): + return self.use_email[0].address + return None + + @classmethod + def allowed_massmail_keys(cls): + """returns a set of allowed email substitution keys + + The default is to return the entity's attribute list but an + entity class might override this method to allow extra keys. + For instance, the Person class might want to return a `companyname` + key. + """ + return set(rschema.type + for rschema, attrtype in cls.e_schema.attribute_definitions() + if attrtype.type not in ('Password', 'Bytes')) + + def as_email_context(self): + """returns the dictionary as used by the sendmail controller to + build email bodies. + + NOTE: the dictionary keys should match the list returned by the + `allowed_massmail_keys` method. + """ + return dict( (attr, getattr(self, attr)) for attr in self.allowed_massmail_keys() ) + + +"""pluggable mixins system: plug classes registered in MI_REL_TRIGGERS on entity +classes which have the relation described by the dict's key. + +NOTE: pluggable mixins can't override any method of the 'explicit' user classes tree +(eg without plugged classes). This includes bases Entity and AnyEntity classes. +""" +MI_REL_TRIGGERS = { + ('primary_email', 'subject'): EmailableMixIn, + ('use_email', 'subject'): EmailableMixIn, + } + + + +def _done_init(done, view, row, col): + """handle an infinite recursion safety belt""" + if done is None: + done = set() + entity = view.cw_rset.get_entity(row, col) + if entity.eid in done: + msg = entity._cw._('loop in %(rel)s relation (%(eid)s)') % { + 'rel': entity.tree_attribute, + 'eid': entity.eid + } + return None, msg + done.add(entity.eid) + return done, entity + + +class TreeViewMixIn(object): + """a recursive tree view""" + __regid__ = 'tree' + item_vid = 'treeitem' + __select__ = implements(ITree) + + def call(self, done=None, **kwargs): + if done is None: + done = set() + super(TreeViewMixIn, self).call(done=done, **kwargs) + + def cell_call(self, row, col=0, vid=None, done=None, **kwargs): + done, entity = _done_init(done, self, row, col) + if done is None: + # entity is actually an error message + self.w(u'
<li class="badcontent">%s</li>' % entity) + return + self.open_item(entity) + entity.view(vid or self.item_vid, w=self.w, **kwargs) + relatedrset = entity.children(entities=False) + self.wview(self.__regid__, relatedrset, 'null', done=done, **kwargs) + self.close_item(entity) + + def open_item(self, entity): + self.w(u'<li class="%s">\n' % entity.__regid__.lower()) + def close_item(self, entity): + self.w(u'</li>\n') + + +class TreePathMixIn(object): + """a recursive path view""" + __regid__ = 'path' + item_vid = 'oneline' + separator = u' > ' + + def call(self, **kwargs): + self.w(u'<div class="pathbar">') + super(TreePathMixIn, self).call(**kwargs) + self.w(u'</div>
    ') + + def cell_call(self, row, col=0, vid=None, done=None, **kwargs): + done, entity = _done_init(done, self, row, col) + if done is None: + # entity is actually an error message + self.w(u'%s' % entity) + return + parent = entity.parent() + if parent: + parent.view(self.__regid__, w=self.w, done=done) + self.w(self.separator) + entity.view(vid or self.item_vid, w=self.w) + + +class ProgressMixIn(object): + """provide default implementations for IProgress interface methods""" + # This is an adapter isn't it ? + + @property + def cost(self): + return self.progress_info()['estimated'] + + @property + def revised_cost(self): + return self.progress_info().get('estimatedcorrected', self.cost) + + @property + def done(self): + return self.progress_info()['done'] + + @property + def todo(self): + return self.progress_info()['todo'] + + @cached + def progress_info(self): + raise NotImplementedError() + + def finished(self): + return not self.in_progress() + + def in_progress(self): + raise NotImplementedError() + + def progress(self): + try: + return 100. * self.done / self.revised_cost + except ZeroDivisionError: + # total cost is 0 : if everything was estimated, task is completed + if self.progress_info().get('notestimated'): + return 0. + return 100 diff -r 15d541321a8c -r 74c1597f8a82 mttransforms.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mttransforms.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,102 @@ +"""mime type transformation engine for cubicweb, based on mtconverter + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from logilab import mtconverter + +from logilab.mtconverter.engine import TransformEngine +from logilab.mtconverter.transform import Transform +from logilab.mtconverter import (register_base_transforms, + register_pil_transforms, + register_pygments_transforms) + +from cubicweb.utils import UStringIO +from cubicweb.uilib import rest_publish, html_publish + +HTML_MIMETYPES = ('text/html', 'text/xhtml', 'application/xhtml+xml') + +# CubicWeb specific transformations + +class rest_to_html(Transform): + inputs = ('text/rest', 'text/x-rst') + output = 'text/html' + def _convert(self, trdata): + return rest_publish(trdata.appobject, trdata.decode()) + +class html_to_html(Transform): + inputs = HTML_MIMETYPES + output = 'text/html' + def _convert(self, trdata): + return html_publish(trdata.appobject, trdata.data) + + +# Instantiate and configure the transformation engine + +mtconverter.UNICODE_POLICY = 'replace' + +ENGINE = TransformEngine() +ENGINE.add_transform(rest_to_html()) +ENGINE.add_transform(html_to_html()) + +try: + from cubicweb.ext.tal import CubicWebContext, compile_template +except ImportError: + HAS_TAL = False + from cubicweb import schema + schema.NEED_PERM_FORMATS.remove('text/cubicweb-page-template') + +else: + HAS_TAL = True + + class ept_to_html(Transform): + inputs = ('text/cubicweb-page-template',) + output = 'text/html' + output_encoding = 'utf-8' + def _convert(self, trdata): + context = CubicWebContext() + appobject = trdata.appobject + context.update({'self': appobject, 'rset': appobject.cw_rset, + 'req': appobject._cw, + '_' : appobject._cw._, + 'user': appobject._cw.user}) + output = UStringIO() + template = compile_template(trdata.encode(self.output_encoding)) + template.expand(context, output) + 
return output.getvalue() + + ENGINE.add_transform(ept_to_html()) + +if register_pil_transforms(ENGINE, verb=False): + HAS_PIL_TRANSFORMS = True +else: + HAS_PIL_TRANSFORMS = False + +try: + from logilab.mtconverter.transforms import pygmentstransforms + for mt in ('text/plain',) + HTML_MIMETYPES: + try: + pygmentstransforms.mimetypes.remove(mt) + except ValueError: + continue + register_pygments_transforms(ENGINE, verb=False) + + def patch_convert(cls): + def _convert(self, trdata, origconvert=cls._convert): + try: + trdata.appobject._cw.add_css('pygments.css') + except AttributeError: # session has no add_css, only http request + pass + return origconvert(self, trdata) + cls._convert = _convert + patch_convert(pygmentstransforms.PygmentsHTMLTransform) + + HAS_PYGMENTS_TRANSFORMS = True +except ImportError: + HAS_PYGMENTS_TRANSFORMS = False + +register_base_transforms(ENGINE, verb=False) diff -r 15d541321a8c -r 74c1597f8a82 req.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/req.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,390 @@ +"""Base class for request/session + +:organization: Logilab +:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: Library General Public License version 2 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from urllib import quote as urlquote, unquote as urlunquote +from datetime import time, datetime, timedelta + +from logilab.common.decorators import cached +from logilab.common.deprecation import deprecated + +from cubicweb import Unauthorized, RegistryException, typed_eid +from cubicweb.rset import ResultSet +from cubicweb.utils import ustrftime, strptime, todate, todatetime + +ONESECOND = timedelta(0, 1, 0) +CACHE_REGISTRY = {} + + +class Cache(dict): + def __init__(self): + super(Cache, self).__init__() + _now = datetime.now() + self.cache_creation_date = _now + self.latest_cache_lookup = _now + + +class RequestSessionBase(object): + """base class containing stuff shared by server session and web request + + request/session is the main resources accessor, mainly through it's vreg + attribute: + :vreg: + the instance's registry + :vreg.schema: + the instance's schema + :vreg.config: + the instance's configuration + """ + def __init__(self, vreg): + self.vreg = vreg + try: + encoding = vreg.property_value('ui.encoding') + except: # no vreg or property not registered + encoding = 'utf-8' + self.encoding = encoding + # cache result of execution for (rql expr / eids), + # should be emptied on commit/rollback of the server session / web + # connection + self.local_perm_cache = {} + self._ = unicode + + def property_value(self, key): + """return value of the property with the given key, giving priority to + user specific value if any, else using site value + """ + if self.user: + return self.user.property_value(key) + return self.vreg.property_value(key) + + def etype_rset(self, etype, size=1): + """return a fake result set for a particular entity type""" + rset = ResultSet([('A',)]*size, '%s X' % etype, + description=[(etype,)]*size) + def get_entity(row, col=0, etype=etype, req=self, rset=rset): + return req.vreg.etype_class(etype)(req, rset, row, col) + rset.get_entity = get_entity + return self.decorate_rset(rset) + + def eid_rset(self, eid, etype=None): + """return a result set for the given eid without doing actual query + (we have the eid, we can suppose it exists and user has access to the + entity) + """ + eid = typed_eid(eid) + if 
etype is None: + etype = self.describe(eid)[0] + rset = ResultSet([(eid,)], 'Any X WHERE X eid %(x)s', {'x': eid}, + [(etype,)]) + return self.decorate_rset(rset) + + def empty_rset(self): + """return a result set for the given eid without doing actual query + (we have the eid, we can suppose it exists and user has access to the + entity) + """ + return self.decorate_rset(ResultSet([], 'Any X WHERE X eid -1')) + + def entity_from_eid(self, eid, etype=None): + """return an entity instance for the given eid. No query is done""" + try: + return self.entity_cache(eid) + except KeyError: + rset = self.eid_rset(eid, etype) + entity = rset.get_entity(0, 0) + self.set_entity_cache(entity) + return entity + + def entity_cache(self, eid): + raise KeyError + + def set_entity_cache(self, entity): + pass + + # XXX move to CWEntityManager or even better as factory method (unclear + # where yet...) + + def create_entity(self, etype, _cw_unsafe=False, **kwargs): + """add a new entity of the given type + + Example (in a shell session): + + c = create_entity('Company', name=u'Logilab') + create_entity('Person', works_for=c, firstname=u'John', lastname=u'Doe') + + """ + if _cw_unsafe: + execute = self.unsafe_execute + else: + execute = self.execute + rql = 'INSERT %s X' % etype + relations = [] + restrictions = set() + cachekey = [] + pending_relations = [] + for attr, value in kwargs.items(): + if isinstance(value, (tuple, list, set, frozenset)): + if len(value) == 1: + value = iter(value).next() + else: + del kwargs[attr] + pending_relations.append( (attr, value) ) + continue + if hasattr(value, 'eid'): # non final relation + rvar = attr.upper() + # XXX safer detection of object relation + if attr.startswith('reverse_'): + relations.append('%s %s X' % (rvar, attr[len('reverse_'):])) + else: + relations.append('X %s %s' % (attr, rvar)) + restriction = '%s eid %%(%s)s' % (rvar, attr) + if not restriction in restrictions: + restrictions.add(restriction) + cachekey.append(attr) + kwargs[attr] = value.eid + else: # attribute + relations.append('X %s %%(%s)s' % (attr, attr)) + if relations: + rql = '%s: %s' % (rql, ', '.join(relations)) + if restrictions: + rql = '%s WHERE %s' % (rql, ', '.join(restrictions)) + created = execute(rql, kwargs, cachekey).get_entity(0, 0) + for attr, values in pending_relations: + if attr.startswith('reverse_'): + restr = 'Y %s X' % attr[len('reverse_'):] + else: + restr = 'X %s Y' % attr + execute('SET %s WHERE X eid %%(x)s, Y eid IN (%s)' % ( + restr, ','.join(str(r.eid) for r in values)), + {'x': created.eid}, 'x') + return created + + def ensure_ro_rql(self, rql): + """raise an exception if the given rql is not a select query""" + first = rql.split(' ', 1)[0].lower() + if first in ('insert', 'set', 'delete'): + raise Unauthorized(self._('only select queries are authorized')) + + def get_cache(self, cachename): + """ + NOTE: cachename should be dotted names as in : + - cubicweb.mycache + - cubes.blog.mycache + - etc. 
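A minimal usage sketch (the cache name and the BlogEntry query are only assumptions; a CWCache entity with that name must already exist in the instance):

    cache = self.get_cache('cubes.blog.lateststuff')  # hypothetical cache name
    if 'entries' not in cache:
        cache['entries'] = self.execute('Any X WHERE X is BlogEntry')
    entries = cache['entries']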
+ """ + if cachename in CACHE_REGISTRY: + cache = CACHE_REGISTRY[cachename] + else: + cache = CACHE_REGISTRY[cachename] = Cache() + _now = datetime.now() + if _now > cache.latest_cache_lookup + ONESECOND: + ecache = self.execute( + 'Any C,T WHERE C is CWCache, C name %(name)s, C timestamp T', + {'name':cachename}).get_entity(0,0) + cache.latest_cache_lookup = _now + if not ecache.valid(cache.cache_creation_date): + cache.clear() + cache.cache_creation_date = _now + return cache + + # url generation methods ################################################## + + def build_url(self, *args, **kwargs): + """return an absolute URL using params dictionary key/values as URL + parameters. Values are automatically URL quoted, and the + publishing method to use may be specified or will be guessed. + """ + # use *args since we don't want first argument to be "anonymous" to + # avoid potential clash with kwargs + if args: + assert len(args) == 1, 'only 0 or 1 non-named-argument expected' + method = args[0] + else: + method = None + # XXX I (adim) think that if method is passed explicitly, we should + # not try to process it and directly call req.build_url() + if method is None: + if self.from_controller() == 'view' and not '_restpath' in kwargs: + method = self.relative_path(includeparams=False) or 'view' + else: + method = 'view' + base_url = kwargs.pop('base_url', None) + if base_url is None: + base_url = self.base_url() + if '_restpath' in kwargs: + assert method == 'view', method + path = kwargs.pop('_restpath') + else: + path = method + if not kwargs: + return u'%s%s' % (base_url, path) + return u'%s%s?%s' % (base_url, path, self.build_url_params(**kwargs)) + + + def build_url_params(self, **kwargs): + """return encoded params to incorporate them in an URL""" + args = [] + for param, values in kwargs.items(): + if not isinstance(values, (list, tuple)): + values = (values,) + for value in values: + args.append(u'%s=%s' % (param, self.url_quote(value))) + return '&'.join(args) + + def url_quote(self, value, safe=''): + """urllib.quote is not unicode safe, use this method to do the + necessary encoding / decoding. Also it's designed to quote each + part of a url path and so the '/' character will be encoded as well. 
+ """ + if isinstance(value, unicode): + quoted = urlquote(value.encode(self.encoding), safe=safe) + return unicode(quoted, self.encoding) + return urlquote(str(value), safe=safe) + + def url_unquote(self, quoted): + """returns a unicode unquoted string + + decoding is based on `self.encoding` which is the encoding + used in `url_quote` + """ + if isinstance(quoted, unicode): + quoted = quoted.encode(self.encoding) + try: + return unicode(urlunquote(quoted), self.encoding) + except UnicodeDecodeError: # might occurs on manually typed URLs + return unicode(urlunquote(quoted), 'iso-8859-1') + + # bound user related methods ############################################### + + @cached + def user_data(self): + """returns a dictionnary with this user's information""" + userinfo = {} + if self.is_internal_session: + userinfo['login'] = "cubicweb" + userinfo['name'] = "cubicweb" + userinfo['email'] = "" + return userinfo + user = self.actual_session().user + userinfo['login'] = user.login + userinfo['name'] = user.name() + userinfo['email'] = user.get_email() + return userinfo + + def is_internal_session(self): + """overrided on the server-side""" + return False + + # formating methods ####################################################### + + def view(self, __vid, rset=None, __fallback_oid=None, __registry='views', + initargs=None, **kwargs): + """Select object with the given id (`__oid`) then render it. If the + object isn't selectable, try to select fallback object if + `__fallback_oid` is specified. + + If specified `initargs` is expected to be a dictionnary containing + arguments that should be given to selection (hence to object's __init__ + as well), but not to render(). Other arbitrary keyword arguments will be + given to selection *and* to render(), and so should be handled by + object's call or cell_call method.. 
+ """ + if initargs is None: + initargs = kwargs + else: + initargs.update(kwargs) + try: + view = self.vreg[__registry].select(__vid, self, rset=rset, **initargs) + except RegistryException: + view = self.vreg[__registry].select(__fallback_oid, self, + rset=rset, **initargs) + return view.render(**kwargs) + + def format_date(self, date, date_format=None, time=False): + """return a string for a date time according to instance's + configuration + """ + if date: + if date_format is None: + if time: + date_format = self.property_value('ui.datetime-format') + else: + date_format = self.property_value('ui.date-format') + return ustrftime(date, date_format) + return u'' + + def format_time(self, time): + """return a string for a time according to instance's + configuration + """ + if time: + return ustrftime(time, self.property_value('ui.time-format')) + return u'' + + def format_float(self, num): + """return a string for floating point number according to instance's + configuration + """ + if num: + return self.property_value('ui.float-format') % num + return u'' + + def parse_datetime(self, value, etype='Datetime'): + """get a datetime or time from a string (according to etype) + Datetime formatted as Date are accepted + """ + assert etype in ('Datetime', 'Date', 'Time'), etype + # XXX raise proper validation error + if etype == 'Datetime': + format = self.property_value('ui.datetime-format') + try: + return todatetime(strptime(value, format)) + except ValueError: + pass + elif etype == 'Time': + format = self.property_value('ui.time-format') + try: + # (adim) I can't find a way to parse a Time with a custom format + date = strptime(value, format) # this returns a DateTime + return time(date.hour, date.minute, date.second) + except ValueError: + raise ValueError(self._('can\'t parse %(value)r (expected %(format)s)') + % {'value': value, 'format': format}) + try: + format = self.property_value('ui.date-format') + dt = strptime(value, format) + if etype == 'Datetime': + return todatetime(dt) + return todate(dt) + except ValueError: + raise ValueError(self._('can\'t parse %(value)r (expected %(format)s)') + % {'value': value, 'format': format}) + + # abstract methods to override according to the web front-end ############# + + def base_url(self): + """return the root url of the instance""" + raise NotImplementedError + + def decorate_rset(self, rset): + """add vreg/req (at least) attributes to the given result set """ + raise NotImplementedError + + def describe(self, eid): + """return a tuple (type, sourceuri, extid) for the entity with id """ + raise NotImplementedError + + @property + @deprecated('[3.6] use _cw.vreg.config') + def config(self): + return self.vreg.config + + @property + @deprecated('[3.6] use _cw.vreg.schema') + def schema(self): + return self.vreg.schema diff -r 15d541321a8c -r 74c1597f8a82 rqlrewrite.py --- a/rqlrewrite.py Wed Jan 20 10:13:02 2010 +0100 +++ b/rqlrewrite.py Wed Jan 20 10:13:45 2010 +0100 @@ -402,12 +402,12 @@ orel = self.varinfo['lhs_rels'][sniprel.r_type] cardindex = 0 ttypes_func = rschema.objects - rprop = rschema.rproperty + rdef = rschema.rdef else: # target == 'subject': orel = self.varinfo['rhs_rels'][sniprel.r_type] cardindex = 1 ttypes_func = rschema.subjects - rprop = lambda x, y, z: rschema.rproperty(y, x, z) + rdef = lambda x, y: rschema.rdef(y, x) except KeyError, ex: # may be raised by self.varinfo['xhs_rels'][sniprel.r_type] return None @@ -419,7 +419,7 @@ # variable from the original query for etype in self.varinfo['stinfo']['possibletypes']: 
for ttype in ttypes_func(etype): - if rprop(etype, ttype, 'cardinality')[cardindex] in '+*': + if rdef(etype, ttype).cardinality[cardindex] in '+*': return None return orel diff -r 15d541321a8c -r 74c1597f8a82 rset.py --- a/rset.py Wed Jan 20 10:13:02 2010 +0100 +++ b/rset.py Wed Jan 20 10:13:45 2010 +0100 @@ -9,7 +9,7 @@ from logilab.common.decorators import cached, clear_cache, copy_cache -from rql import nodes +from rql import nodes, stmts from cubicweb import NotAnEntity @@ -83,7 +83,7 @@ try: return self._rsetactions[key] except KeyError: - actions = self.vreg['actions'].possible_vobjects( + actions = self.vreg['actions'].poss_visible_objects( self.req, rset=self, **kwargs) self._rsetactions[key] = actions return actions @@ -243,8 +243,6 @@ rset = mapping[key] rset.rows.append(self.rows[idx]) rset.description.append(self.description[idx]) - - for rset in result: rset.rowcount = len(rset.rows) if return_dict: @@ -252,6 +250,51 @@ else: return result + def limited_rql(self): + """return a printable rql for the result set associated to the object, + with limit/offset correctly set according to maximum page size and + currently displayed page when necessary + """ + # try to get page boundaries from the navigation component + # XXX we should probably not have a ref to this component here (eg in + # cubicweb) + nav = self.vreg['components'].select_or_none('navigation', self.req, + rset=self) + if nav: + start, stop = nav.page_boundaries() + rql = self._limit_offset_rql(stop - start, start) + # result set may have be limited manually in which case navigation won't + # apply + elif self.limited: + rql = self._limit_offset_rql(*self.limited) + # navigation component doesn't apply and rset has not been limited, no + # need to limit query + else: + rql = self.printable_rql() + return rql + + def _limit_offset_rql(self, limit, offset): + rqlst = self.syntax_tree() + if len(rqlst.children) == 1: + select = rqlst.children[0] + olimit, ooffset = select.limit, select.offset + select.limit, select.offset = limit, offset + rql = rqlst.as_string(kwargs=self.args) + # restore original limit/offset + select.limit, select.offset = olimit, ooffset + else: + newselect = stmts.Select() + newselect.limit = limit + newselect.offset = offset + aliases = [nodes.VariableRef(newselect.get_variable(vref.name, i)) + for i, vref in enumerate(rqlst.selection)] + newselect.set_with([nodes.SubQuery(aliases, rqlst)], check=False) + newunion = stmts.Union() + newunion.append(newselect) + rql = rqlst.as_string(kwargs=self.args) + rqlst.parent = None + return rql + def limit(self, limit, offset=0, inplace=False): """limit the result set to the given number of rows optionaly starting from an index different than 0 @@ -282,9 +325,9 @@ # we also have to fix/remove from the request entity cache entities # which get a wrong rset reference by this limit call for entity in self.req.cached_entities(): - if entity.rset is self: - if offset <= entity.row < stop: - entity.row = entity.row - offset + if entity.cw_rset is self: + if offset <= entity.cw_row < stop: + entity.cw_row = entity.cw_row - offset else: self.req.drop_entity_cache(entity.eid) else: @@ -321,6 +364,14 @@ if self.rows[i][col] is not None: yield self.get_entity(i, col) + def complete_entity(self, row, col=0, skip_bytes=True): + """short cut to get an completed entity instance for a particular + row (all instance's attributes have been fetched) + """ + entity = self.get_entity(row, col) + entity.complete(skip_bytes=skip_bytes) + return entity + @cached def 
get_entity(self, row, col=None): """special method for query retreiving a single entity, returns a @@ -338,7 +389,7 @@ """ if col is None: from warnings import warn - msg = 'col parameter will become mandatory in future version' + msg = '[3.2] col parameter will become mandatory in future version' warn(msg, DeprecationWarning, stacklevel=3) col = 0 etype = self.description[row][col] @@ -374,16 +425,17 @@ # new attributes found in this resultset ? try: entity = req.entity_cache(eid) - if entity.rset is None: - # entity has no rset set, this means entity has been cached by - # the repository (req is a repository session) which had no rset - # info. Add id. - entity.rset = self - entity.row = row - entity.col = col - return entity except KeyError: pass + else: + if entity.cw_rset is None: + # entity has no rset set, this means entity has been created by + # the querier (req is a repository session) and so jas no rset + # info. Add it. + entity.cw_rset = self + entity.cw_row = row + entity.cw_col = col + return entity # build entity instance etype = self.description[row][col] entity = self.vreg['etypes'].etype_class(etype)(req, rset=self, @@ -403,25 +455,22 @@ select = rqlst # take care, due to outer join support, we may find None # values for non final relation - for i, attr, x in attr_desc_iterator(select, col): + for i, attr, role in attr_desc_iterator(select, col): outerselidx = rqlst.subquery_selection_index(select, i) if outerselidx is None: continue - if x == 'subject': + if role == 'subject': rschema = eschema.subjrels[attr] if rschema.final: entity[attr] = rowvalues[outerselidx] continue - tetype = rschema.objects(etype)[0] - card = rschema.rproperty(etype, tetype, 'cardinality')[0] else: rschema = eschema.objrels[attr] - tetype = rschema.subjects(etype)[0] - card = rschema.rproperty(tetype, etype, 'cardinality')[1] + rdef = eschema.rdef(attr, role) # only keep value if it can't be multivalued - if card in '1?': + if rdef.role_cardinality(role) in '1?': if rowvalues[outerselidx] is None: - if x == 'subject': + if role == 'subject': rql = 'Any Y WHERE X %s Y, X eid %s' else: rql = 'Any Y WHERE Y %s X, X eid %s' @@ -429,7 +478,7 @@ req.decorate_rset(rrset) else: rrset = self._build_entity(row, outerselidx).as_rset() - entity.set_related_cache(attr, x, rrset) + entity.set_related_cache(attr, role, rrset) return entity @cached diff -r 15d541321a8c -r 74c1597f8a82 rtags.py --- a/rtags.py Wed Jan 20 10:13:02 2010 +0100 +++ b/rtags.py Wed Jan 20 10:13:45 2010 +0100 @@ -15,6 +15,9 @@ def register_rtag(rtag): RTAGS.append(rtag) +def _ensure_str_key(key): + return tuple(str(k) for k in key) + class RelationTags(object): """a tag store for full relation definitions : @@ -25,12 +28,14 @@ This class associates a single tag to each key. 
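A minimal usage sketch (the store name and tag values are arbitrary, the relation comes from the standard schema):

    from cubicweb.rtags import RelationTags
    tags = RelationTags('mytags', allowed_values=set(('primary', 'secondary')))
    tags.tag_subject_of(('CWUser', 'use_email', '*'), 'primary')
    tags.get('CWUser', 'use_email', 'EmailAddress', 'subject')  # -> 'primary'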
""" _allowed_values = None + _initfunc = None def __init__(self, name=None, initfunc=None, allowed_values=None): self._name = name or '' self._tagdefs = {} if allowed_values is not None: self._allowed_values = allowed_values - self._initfunc = initfunc + if initfunc is not None: + self._initfunc = initfunc register_rtag(self) def __repr__(self): @@ -45,22 +50,22 @@ self._tagdefs.clear() def _get_keys(self, stype, rtype, otype, tagged): - keys = [(rtype, tagged, '*', '*'), - (rtype, tagged, '*', otype), - (rtype, tagged, stype, '*'), - (rtype, tagged, stype, otype)] + keys = [('*', rtype, '*', tagged), + ('*', rtype, otype, tagged), + (stype, rtype, '*', tagged), + (stype, rtype, otype, tagged)] if stype == '*' or otype == '*': - keys.remove((rtype, tagged, '*', '*')) + keys.remove( ('*', rtype, '*', tagged) ) if stype == '*': - keys.remove((rtype, tagged, '*', otype)) + keys.remove( ('*', rtype, otype, tagged) ) if otype == '*': - keys.remove((rtype, tagged, stype, '*')) + keys.remove( (stype, rtype, '*', tagged) ) return keys def init(self, schema, check=True): # XXX check existing keys against schema if check: - for (rtype, tagged, stype, otype), value in self._tagdefs.items(): + for (stype, rtype, otype, tagged), value in self._tagdefs.items(): for ertype in (stype, rtype, otype): if ertype != '*' and not ertype in schema: self.warning('removing rtag %s: %s, %s undefined in schema', @@ -79,40 +84,36 @@ # rtag declaration api #################################################### - def tag_attribute(self, key, tag): + def tag_attribute(self, key, *args, **kwargs): key = list(key) key.append('*') - self.tag_subject_of(key, tag) + self.tag_subject_of(key, *args, **kwargs) - def tag_subject_of(self, key, tag): + def tag_subject_of(self, key, *args, **kwargs): key = list(key) key.append('subject') - self.tag_relation(key, tag) + self.tag_relation(key, *args, **kwargs) - def tag_object_of(self, key, tag): + def tag_object_of(self, key, *args, **kwargs): key = list(key) key.append('object') - self.tag_relation(key, tag) + self.tag_relation(key, *args, **kwargs) def tag_relation(self, key, tag): - #if isinstance(key, basestring): - # stype, rtype, otype = key.split() - #else: - stype, rtype, otype, tagged = [str(k) for k in key] if self._allowed_values is not None: assert tag in self._allowed_values, \ '%r is not an allowed tag (should be in %s)' % ( tag, self._allowed_values) - self._tagdefs[(rtype, tagged, stype, otype)] = tag + self._tagdefs[_ensure_str_key(key)] = tag return tag # rtag runtime api ######################################################## - def del_rtag(self, stype, rtype, otype, tagged): - del self._tagdefs[(rtype, tagged, stype, otype)] + def del_rtag(self, *key): + del self._tagdefs[key] - def get(self, stype, rtype, otype, tagged): - for key in reversed(self._get_keys(stype, rtype, otype, tagged)): + def get(self, *key): + for key in reversed(self._get_keys(*key)): try: return self._tagdefs[key] except KeyError: @@ -132,8 +133,7 @@ tag_container_cls = set def tag_relation(self, key, tag): - stype, rtype, otype, tagged = [str(k) for k in key] - rtags = self._tagdefs.setdefault((rtype, tagged, stype, otype), + rtags = self._tagdefs.setdefault(_ensure_str_key(key), self.tag_container_cls()) rtags.add(tag) return rtags @@ -153,24 +153,24 @@ tag_container_cls = dict def tag_relation(self, key, tag): - stype, rtype, otype, tagged = [str(k) for k in key] + key = _ensure_str_key(key) try: - rtags = self._tagdefs[(rtype, tagged, stype, otype)] + rtags = self._tagdefs[key] 
rtags.update(tag) return rtags except KeyError: - self._tagdefs[(rtype, tagged, stype, otype)] = tag + self._tagdefs[key] = tag return tag def setdefault(self, key, tagkey, tagvalue): - stype, rtype, otype, tagged = [str(k) for k in key] + key = _ensure_str_key(key) try: - rtags = self._tagdefs[(rtype, tagged, stype, otype)] + rtags = self._tagdefs[key] rtags.setdefault(tagkey, tagvalue) return rtags except KeyError: - self._tagdefs[(rtype, tagged, stype, otype)] = {tagkey: tagvalue} - return self._tagdefs[(rtype, tagged, stype, otype)] + self._tagdefs[key] = {tagkey: tagvalue} + return self._tagdefs[key] class RelationTagsBool(RelationTags): diff -r 15d541321a8c -r 74c1597f8a82 schema.py --- a/schema.py Wed Jan 20 10:13:02 2010 +0100 +++ b/schema.py Wed Jan 20 10:13:45 2010 +0100 @@ -20,7 +20,8 @@ from logilab.common.compat import any from yams import BadSchemaDefinition, buildobjs as ybo -from yams.schema import Schema, ERSchema, EntitySchema, RelationSchema +from yams.schema import Schema, ERSchema, EntitySchema, RelationSchema, \ + RelationDefinitionSchema, PermissionMixIn from yams.constraints import (BaseConstraint, StaticVocabularyConstraint, FormatConstraint) from yams.reader import (CONSTRAINTS, PyFileReader, SchemaLoader, @@ -117,7 +118,7 @@ else: return unicode(req._(key)).lower() -__builtins__['display_name'] = deprecated('display_name should be imported from cubicweb.schema')(display_name) +__builtins__['display_name'] = deprecated('[3.4] display_name should be imported from cubicweb.schema')(display_name) # rql expression utilities function ############################################ @@ -151,15 +152,15 @@ # Schema objects definition ################################################### -def ERSchema_display_name(self, req, form=''): +def ERSchema_display_name(self, req, form='', context=None): """return a internationalized string for the entity/relation type name in a given form """ - return display_name(req, self.type, form) + return display_name(req, self.type, form, context) ERSchema.display_name = ERSchema_display_name @cached -def ERSchema_get_groups(self, action): +def get_groups(self, action): """return the groups authorized to perform on entities of this type @@ -172,28 +173,13 @@ assert action in self.ACTIONS, action #assert action in self._groups, '%s %s' % (self, action) try: - return frozenset(g for g in self._groups[action] if isinstance(g, basestring)) + return frozenset(g for g in self.permissions[action] if isinstance(g, basestring)) except KeyError: return () -ERSchema.get_groups = ERSchema_get_groups - -def ERSchema_set_groups(self, action, groups): - """set the groups allowed to perform on entities of this type. Don't - change rql expressions for the same action. 
- - :type action: str - :param action: the name of a permission - - :type groups: list or tuple - :param groups: names of the groups granted to do the given action - """ - assert action in self.ACTIONS, action - clear_cache(self, 'ERSchema_get_groups') - self._groups[action] = tuple(groups) + self.get_rqlexprs(action) -ERSchema.set_groups = ERSchema_set_groups +PermissionMixIn.get_groups = get_groups @cached -def ERSchema_get_rqlexprs(self, action): +def get_rqlexprs(self, action): """return the rql expressions representing queries to check the user is allowed to perform on entities of this type @@ -206,27 +192,13 @@ assert action in self.ACTIONS, action #assert action in self._rqlexprs, '%s %s' % (self, action) try: - return tuple(g for g in self._groups[action] if not isinstance(g, basestring)) + return tuple(g for g in self.permissions[action] if not isinstance(g, basestring)) except KeyError: return () -ERSchema.get_rqlexprs = ERSchema_get_rqlexprs - -def ERSchema_set_rqlexprs(self, action, rqlexprs): - """set the rql expression allowing to perform on entities of this type. Don't - change groups for the same action. - - :type action: str - :param action: the name of a permission +PermissionMixIn.get_rqlexprs = get_rqlexprs - :type rqlexprs: list or tuple - :param rqlexprs: the rql expressions allowing the given action - """ - assert action in self.ACTIONS, action - clear_cache(self, 'ERSchema_get_rqlexprs') - self._groups[action] = tuple(self.get_groups(action)) + tuple(rqlexprs) -ERSchema.set_rqlexprs = ERSchema_set_rqlexprs - -def ERSchema_set_permissions(self, action, permissions): +orig_set_action_permissions = PermissionMixIn.set_action_permissions +def set_action_permissions(self, action, permissions): """set the groups and rql expressions allowing to perform on entities of this type @@ -236,22 +208,12 @@ :type permissions: tuple :param permissions: the groups and rql expressions allowing the given action """ - assert action in self.ACTIONS, action - clear_cache(self, 'ERSchema_get_rqlexprs') - clear_cache(self, 'ERSchema_get_groups') - self._groups[action] = tuple(permissions) -ERSchema.set_permissions = ERSchema_set_permissions + orig_set_action_permissions(self, action, tuple(permissions)) + clear_cache(self, 'get_rqlexprs') + clear_cache(self, 'get_groups') +PermissionMixIn.set_action_permissions = set_action_permissions -def ERSchema_has_perm(self, session, action, *args, **kwargs): - """return true if the action is granted globaly or localy""" - try: - self.check_perm(session, action, *args, **kwargs) - return True - except Unauthorized: - return False -ERSchema.has_perm = ERSchema_has_perm - -def ERSchema_has_local_role(self, action): +def has_local_role(self, action): """return true if the action *may* be granted localy (eg either rql expressions or the owners group are used in security definition) @@ -262,9 +224,83 @@ if self.get_rqlexprs(action): return True if action in ('update', 'delete'): - return self.has_group(action, 'owners') + return 'owners' in self.get_groups(action) return False -ERSchema.has_local_role = ERSchema_has_local_role +PermissionMixIn.has_local_role = has_local_role + +def may_have_permission(self, action, req): + if action != 'read' and not (self.has_local_role('read') or + self.has_perm(req, 'read')): + return False + return self.has_local_role(action) or self.has_perm(req, action) +PermissionMixIn.may_have_permission = may_have_permission + +def has_perm(self, session, action, **kwargs): + """return true if the action is granted globaly or 
localy""" + try: + self.check_perm(session, action, **kwargs) + return True + except Unauthorized: + return False +PermissionMixIn.has_perm = has_perm + +def check_perm(self, session, action, **kwargs): + # NB: session may be a server session or a request object check user is + # in an allowed group, if so that's enough internal sessions should + # always stop there + groups = self.get_groups(action) + if session.user.matching_groups(groups): + return + # if 'owners' in allowed groups, check if the user actually owns this + # object, if so that's enough + if 'owners' in groups and 'eid' in kwargs and session.user.owns(kwargs['eid']): + return + # else if there is some rql expressions, check them + if any(rqlexpr.check(session, **kwargs) + for rqlexpr in self.get_rqlexprs(action)): + return + raise Unauthorized(action, str(self)) +PermissionMixIn.check_perm = check_perm + + +RelationDefinitionSchema._RPROPERTIES['eid'] = None + +def rql_expression(self, expression, mainvars=None, eid=None): + """rql expression factory""" + if self.rtype.final: + return ERQLExpression(expression, mainvars, eid) + return RRQLExpression(expression, mainvars, eid) +RelationDefinitionSchema.rql_expression = rql_expression + +orig_check_permission_definitions = RelationDefinitionSchema.check_permission_definitions +def check_permission_definitions(self): + orig_check_permission_definitions(self) + schema = self.subject.schema + for action, groups in self.permissions.iteritems(): + for group_or_rqlexpr in groups: + if action == 'read' and \ + isinstance(group_or_rqlexpr, RQLExpression): + msg = "can't use rql expression for read permission of %s" + raise BadSchemaDefinition(msg % self) + elif self.final and isinstance(group_or_rqlexpr, RRQLExpression): + if schema.reading_from_database: + # we didn't have final relation earlier, so turn + # RRQLExpression into ERQLExpression now + rqlexpr = group_or_rqlexpr + newrqlexprs = [x for x in self.get_rqlexprs(action) + if not x is rqlexpr] + newrqlexprs.append(ERQLExpression(rqlexpr.expression, + rqlexpr.mainvars, + rqlexpr.eid)) + self.set_rqlexprs(action, newrqlexprs) + else: + msg = "can't use RRQLExpression on %s, use an ERQLExpression" + raise BadSchemaDefinition(msg % self) + elif not self.final and \ + isinstance(group_or_rqlexpr, ERQLExpression): + msg = "can't use ERQLExpression on %s, use a RRQLExpression" + raise BadSchemaDefinition(msg % self) +RelationDefinitionSchema.check_permission_definitions = check_permission_definitions class CubicWebEntitySchema(EntitySchema): @@ -278,8 +314,8 @@ eid = getattr(edef, 'eid', None) self.eid = eid # take care: no _groups attribute when deep-copying - if getattr(self, '_groups', None): - for groups in self._groups.itervalues(): + if getattr(self, 'permissions', None): + for groups in self.permissions.itervalues(): for group_or_rqlexpr in groups: if isinstance(group_or_rqlexpr, RRQLExpression): msg = "can't use RRQLExpression on an entity type, use an ERQLExpression (%s)" @@ -326,7 +362,7 @@ if rschema.final: if rschema == 'has_text': has_has_text = True - elif self.rproperty(rschema, 'fulltextindexed'): + elif self.rdef(rschema).get('fulltextindexed'): may_need_has_text = True elif rschema.fulltext_container: if rschema.fulltext_container == 'subject': @@ -351,32 +387,12 @@ """return True if this entity type is used to build the schema""" return self.type in SCHEMA_TYPES - def check_perm(self, session, action, eid=None): - # NB: session may be a server session or a request object - user = session.user - # check user is in 
an allowed group, if so that's enough - # internal sessions should always stop there - if user.matching_groups(self.get_groups(action)): - return - # if 'owners' in allowed groups, check if the user actually owns this - # object, if so that's enough - if eid is not None and 'owners' in self.get_groups(action) and \ - user.owns(eid): - return - # else if there is some rql expressions, check them - if any(rqlexpr.check(session, eid) - for rqlexpr in self.get_rqlexprs(action)): - return - raise Unauthorized(action, str(self)) - def rql_expression(self, expression, mainvars=None, eid=None): """rql expression factory""" return ERQLExpression(expression, mainvars, eid) class CubicWebRelationSchema(RelationSchema): - RelationSchema._RPROPERTIES['eid'] = None - _perms_checked = False def __init__(self, schema=None, rdef=None, eid=None, **kwargs): if rdef is not None: @@ -391,73 +407,53 @@ def meta(self): return self.type in META_RTYPES - def update(self, subjschema, objschema, rdef): - super(CubicWebRelationSchema, self).update(subjschema, objschema, rdef) - if not self._perms_checked and self._groups: - for action, groups in self._groups.iteritems(): - for group_or_rqlexpr in groups: - if action == 'read' and \ - isinstance(group_or_rqlexpr, RQLExpression): - msg = "can't use rql expression for read permission of "\ - "a relation type (%s)" - raise BadSchemaDefinition(msg % self.type) - elif self.final and isinstance(group_or_rqlexpr, RRQLExpression): - if self.schema.reading_from_database: - # we didn't have final relation earlier, so turn - # RRQLExpression into ERQLExpression now - rqlexpr = group_or_rqlexpr - newrqlexprs = [x for x in self.get_rqlexprs(action) if not x is rqlexpr] - newrqlexprs.append(ERQLExpression(rqlexpr.expression, - rqlexpr.mainvars, - rqlexpr.eid)) - self.set_rqlexprs(action, newrqlexprs) - else: - msg = "can't use RRQLExpression on a final relation "\ - "type (eg attribute relation), use an ERQLExpression (%s)" - raise BadSchemaDefinition(msg % self.type) - elif not self.final and \ - isinstance(group_or_rqlexpr, ERQLExpression): - msg = "can't use ERQLExpression on a relation type, use "\ - "a RRQLExpression (%s)" - raise BadSchemaDefinition(msg % self.type) - self._perms_checked = True - - def cardinality(self, subjtype, objtype, target): - card = self.rproperty(subjtype, objtype, 'cardinality') - return (target == 'subject' and card[0]) or \ - (target == 'object' and card[1]) - def schema_relation(self): """return True if this relation type is used to build the schema""" return self.type in SCHEMA_TYPES - def physical_mode(self): - """return an appropriate mode for physical storage of this relation type: - * 'subjectinline' if every possible subject cardinalities are 1 or ? - * 'objectinline' if 'subjectinline' mode is not possible but every - possible object cardinalities are 1 or ? 
- * None if neither 'subjectinline' and 'objectinline' - """ - assert not self.final - return self.inlined and 'subjectinline' or None + def may_have_permission(self, action, req, eschema=None, role=None): + if eschema is not None: + for tschema in self.targets(eschema, role): + rdef = self.role_rdef(eschema, tschema, role) + if rdef.may_have_permission(action, req): + return True + else: + for rdef in self.rdefs.itervalues(): + if rdef.may_have_permission(action, req): + return True + return False - def check_perm(self, session, action, *args, **kwargs): - # NB: session may be a server session or a request object check user is - # in an allowed group, if so that's enough internal sessions should - # always stop there - if session.user.matching_groups(self.get_groups(action)): - return - # else if there is some rql expressions, check them - if any(rqlexpr.check(session, *args, **kwargs) - for rqlexpr in self.get_rqlexprs(action)): - return - raise Unauthorized(action, str(self)) + def has_perm(self, session, action, **kwargs): + """return true if the action is granted globaly or localy""" + if 'fromeid' in kwargs: + subjtype = session.describe(kwargs['fromeid'])[0] + else: + subjtype = None + if 'toeid' in kwargs: + objtype = session.describe(kwargs['toeid'])[0] + else: + objtype = None + if objtype and subjtype: + return self.rdef(subjtype, objtype).has_perm(session, action, **kwargs) + elif subjtype: + for tschema in self.targets(subjtype, 'subject'): + rdef = self.rdef(subjtype, tschema) + if not rdef.has_perm(session, action, **kwargs): + return False + elif objtype: + for tschema in self.targets(objtype, 'object'): + rdef = self.rdef(tschema, objtype) + if not rdef.has_perm(session, action, **kwargs): + return False + else: + for rdef in self.rdefs.itervalues(): + if not rdef.has_perm(session, action, **kwargs): + return False + return True - def rql_expression(self, expression, mainvars=None, eid=None): - """rql expression factory""" - if self.final: - return ERQLExpression(expression, mainvars, eid) - return RRQLExpression(expression, mainvars, eid) + @deprecated('use .rdef(subjtype, objtype).role_cardinality(role)') + def cardinality(self, subjtype, objtype, target): + return self.rdef(subjtype, objtype).role_cardinality(target) class CubicWebSchema(Schema): @@ -482,13 +478,10 @@ ybo.register_base_types(self) rschema = self.add_relation_type(ybo.RelationType('eid')) rschema.final = True - rschema.set_default_groups() rschema = self.add_relation_type(ybo.RelationType('has_text')) rschema.final = True - rschema.set_default_groups() rschema = self.add_relation_type(ybo.RelationType('identity')) rschema.final = False - rschema.set_default_groups() def add_entity_type(self, edef): edef.name = edef.name.encode() @@ -530,13 +523,13 @@ rdef.name = rdef.name.lower() rdef.subject = bw_normalize_etype(rdef.subject) rdef.object = bw_normalize_etype(rdef.object) - if super(CubicWebSchema, self).add_relation_def(rdef): + rdefs = super(CubicWebSchema, self).add_relation_def(rdef) + if rdefs: try: - self._eid_index[rdef.eid] = (self.eschema(rdef.subject), - self.rschema(rdef.name), - self.eschema(rdef.object)) + self._eid_index[rdef.eid] = rdefs except AttributeError: pass # not a serialized schema + return rdefs def del_relation_type(self, rtype): rschema = self.rschema(rtype) @@ -545,7 +538,9 @@ def del_relation_def(self, subjtype, rtype, objtype): for k, v in self._eid_index.items(): - if v == (subjtype, rtype, objtype): + if not isinstance(v, RelationDefinitionSchema): + continue + if 
v.subject == subjtype and v.rtype == rtype and v.object == objtype: del self._eid_index[k] break super(CubicWebSchema, self).del_relation_def(subjtype, rtype, objtype) @@ -730,6 +725,11 @@ def __repr__(self): return '%s(%s)' % (self.__class__.__name__, self.full_rql) + def __cmp__(self, other): + if hasattr(other, 'expression'): + return cmp(other.expression, self.expression) + return -1 + def __deepcopy__(self, memo): return self.__class__(self.expression, self.mainvars) def __getstate__(self): @@ -831,7 +831,7 @@ for eaction, var, col in has_perm_defs: for i in xrange(len(rset)): eschema = get_eschema(rset.description[i][col]) - eschema.check_perm(session, eaction, rset[i][col]) + eschema.check_perm(session, eaction, eid=rset[i][col]) if self.eid is not None: session.local_perm_cache[key] = True return True @@ -1036,12 +1036,12 @@ @monkeypatch(FormatConstraint) def vocabulary(self, entity=None, form=None): - req = None + cw = None if form is None and entity is not None: - req = entity.req + cw = entity._cw elif form is not None: - req = form.req - if req is not None and req.user.has_permission(PERM_USE_TEMPLATE_FORMAT): + cw = form._cw + if cw is not None and cw.user.has_permission(PERM_USE_TEMPLATE_FORMAT): return self.regular_formats + tuple(NEED_PERM_FORMATS) return self.regular_formats @@ -1076,10 +1076,8 @@ # XXX deprecated -from yams.constraints import format_constraint from yams.buildobjs import RichString PyFileReader.context['ERQLExpression'] = yobsolete(ERQLExpression) PyFileReader.context['RRQLExpression'] = yobsolete(RRQLExpression) PyFileReader.context['WorkflowableEntityType'] = WorkflowableEntityType -PyFileReader.context['format_constraint'] = format_constraint diff -r 15d541321a8c -r 74c1597f8a82 schemas/Bookmark.py --- a/schemas/Bookmark.py Wed Jan 20 10:13:02 2010 +0100 +++ b/schemas/Bookmark.py Wed Jan 20 10:13:45 2010 +0100 @@ -13,7 +13,7 @@ class Bookmark(EntityType): """bookmarks are used to have user's specific internal links""" - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests',), 'add': ('managers', 'users',), 'delete': ('managers', 'owners',), @@ -29,7 +29,7 @@ class bookmarked_by(RelationType): - permissions = {'read': ('managers', 'users', 'guests',), + __permissions__ = {'read': ('managers', 'users', 'guests',), # test user in users group to avoid granting permission to anonymous user 'add': ('managers', RRQLExpression('O identity U, U in_group G, G name "users"')), 'delete': ('managers', RRQLExpression('O identity U, U in_group G, G name "users"')), diff -r 15d541321a8c -r 74c1597f8a82 schemas/base.py --- a/schemas/base.py Wed Jan 20 10:13:02 2010 +0100 +++ b/schemas/base.py Wed Jan 20 10:13:45 2010 +0100 @@ -16,7 +16,7 @@ class CWUser(WorkflowableEntityType): """define a CubicWeb user""" - permissions = { + __permissions__ = { 'read': ('managers', 'users', ERQLExpression('X identity U')), 'add': ('managers',), 'delete': ('managers',), @@ -42,7 +42,7 @@ class EmailAddress(EntityType): """an electronic mail address associated to a short alias""" - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests',), # XXX if P use_email X, U has_read_permission P 'add': ('managers', 'users',), 'delete': ('managers', 'owners', ERQLExpression('P use_email X, U has_update_permission P')), @@ -59,7 +59,7 @@ class use_email(RelationType): """ """ - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests',), 'add': ('managers', RRQLExpression('U has_update_permission S'),), 'delete': ('managers', 
RRQLExpression('U has_update_permission S'),), @@ -68,12 +68,12 @@ class primary_email(RelationType): """the prefered email""" - permissions = use_email.permissions + __permissions__ = use_email.__permissions__ class prefered_form(RelationType): - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests',), - # XXX should have update permissions on both subject and object, + # XXX should have update __permissions__ on both subject and object, # though by doing this we will probably have no way to add # this relation in the web ui. The easiest way to acheive this # is probably to be able to have "U has_update_permission O" as @@ -85,13 +85,13 @@ class in_group(RelationType): """core relation indicating a user's groups""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS class owned_by(RelationType): """core relation indicating owners of an entity. This relation implicitly put the owner into the owners group for the entity """ - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers', RRQLExpression('S owned_by U'),), 'delete': ('managers', RRQLExpression('S owned_by U'),), @@ -104,7 +104,7 @@ class created_by(RelationType): """core relation indicating the original creator of an entity""" - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers',), 'delete': ('managers',), @@ -135,46 +135,27 @@ object = 'String' -class CWProperty(EntityType): - """used for cubicweb configuration. Once a property has been created you - can't change the key. - """ - permissions = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', 'users',), - 'update': ('managers', 'owners',), - 'delete': ('managers', 'owners',), - } - # key is a reserved word for mysql - pkey = String(required=True, internationalizable=True, maxsize=256, - description=_('defines what\'s the property is applied for. ' - 'You must select this first to be able to set ' - 'value')) - value = String(internationalizable=True, maxsize=256) - - for_user = SubjectRelation('CWUser', cardinality='?*', composite='object', - description=_('user for which this property is ' - 'applying. If this relation is not ' - 'set, the property is considered as' - ' a global property')) - - +# XXX find a better relation name class for_user(RelationType): """link a property to the user which want this property customization. Unless you're a site manager, this relation will be handled automatically. """ - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers',), 'delete': ('managers',), } inlined = True + subject = 'CWProperty' + object = 'CWUser' + composite = 'object' + cardinality = '?*' class CWPermission(EntityType): """entity type that may be used to construct some advanced security configuration """ - permissions = META_ETYPE_PERMS + __permissions__ = META_ETYPE_PERMS name = String(required=True, indexed=True, internationalizable=True, maxsize=100, description=_('name or identifier of the permission')) @@ -189,7 +170,7 @@ """link a permission to the entity. This permission should be used in the security definition of the entity's type to be useful. 
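The usual pattern looks like the sketch below (the Version entity type and the 'update_version' permission name are assumptions, not part of this changeset):

    from yams.buildobjs import EntityType, String
    from cubicweb.schema import ERQLExpression

    class Version(EntityType):
        __permissions__ = {
            'read':   ('managers', 'users', 'guests'),
            'add':    ('managers', 'users'),
            'delete': ('managers',),
            'update': ('managers',
                       ERQLExpression('X require_permission P, P name "update_version", '
                                      'U in_group G, P require_group G')),
            }
        name = String(required=True)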
""" - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers',), 'delete': ('managers',), @@ -197,7 +178,7 @@ class require_group(RelationType): """used to grant a permission to a group""" - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers',), 'delete': ('managers',), @@ -217,7 +198,7 @@ NOTE: You'll have to explicitly declare which entity types can have a same_as relation """ - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests',), 'add': ('managers', 'users'), 'delete': ('managers', 'owners'), @@ -237,7 +218,7 @@ Also, checkout the AppObject.get_cache() method. """ - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers',), 'update': ('managers', 'users',), # XXX @@ -254,9 +235,9 @@ class identical_to(RelationType): """identical to""" symetric = True - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests',), - # XXX should have update permissions on both subject and object, + # XXX should have update __permissions__ on both subject and object, # though by doing this we will probably have no way to add # this relation in the web ui. The easiest way to acheive this # is probably to be able to have "U has_update_permission O" as @@ -269,7 +250,7 @@ class see_also(RelationType): """generic relation to link one entity to another""" symetric = True - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests',), 'add': ('managers', RRQLExpression('U has_update_permission S'),), 'delete': ('managers', RRQLExpression('U has_update_permission S'),), diff -r 15d541321a8c -r 74c1597f8a82 schemas/bootstrap.py --- a/schemas/bootstrap.py Wed Jan 20 10:13:02 2010 +0100 +++ b/schemas/bootstrap.py Wed Jan 20 10:13:45 2010 +0100 @@ -17,7 +17,7 @@ # access to this class CWEType(EntityType): """define an entity type, used to build the instance schema""" - permissions = META_ETYPE_PERMS + __permissions__ = META_ETYPE_PERMS name = String(required=True, indexed=True, internationalizable=True, unique=True, maxsize=64) description = RichString(internationalizable=True, @@ -28,7 +28,7 @@ class CWRType(EntityType): """define a relation type, used to build the instance schema""" - permissions = META_ETYPE_PERMS + __permissions__ = META_ETYPE_PERMS name = String(required=True, indexed=True, internationalizable=True, unique=True, maxsize=64) description = RichString(internationalizable=True, @@ -48,7 +48,7 @@ used to build the instance schema """ - permissions = META_ETYPE_PERMS + __permissions__ = META_ETYPE_PERMS relation_type = SubjectRelation('CWRType', cardinality='1*', constraints=[RQLConstraint('O final TRUE')], composite='object') @@ -85,7 +85,7 @@ used to build the instance schema """ - permissions = META_ETYPE_PERMS + __permissions__ = META_ETYPE_PERMS relation_type = SubjectRelation('CWRType', cardinality='1*', constraints=[RQLConstraint('O final FALSE')], composite='object') @@ -115,8 +115,8 @@ # not restricted since it has to be read when checking allowed transitions class RQLExpression(EntityType): - """define a rql expression used to define permissions""" - permissions = META_ETYPE_PERMS + """define a rql expression used to define __permissions__""" + __permissions__ = META_ETYPE_PERMS exprtype = String(required=True, vocabulary=['ERQLExpression', 'RRQLExpression']) mainvars = String(maxsize=8, description=_('name of the main variables which should be ' @@ -131,11 +131,11 @@ 'relation\'subject, object and to 
' 'the request user. ')) - read_permission = ObjectRelation(('CWEType', 'CWRType'), cardinality='+?', composite='subject', + read_permission = ObjectRelation(('CWEType', 'CWAttribute', 'CWRelation'), cardinality='+?', composite='subject', description=_('rql expression allowing to read entities/relations of this type')) - add_permission = ObjectRelation(('CWEType', 'CWRType'), cardinality='*?', composite='subject', + add_permission = ObjectRelation(('CWEType', 'CWAttribute', 'CWRelation'), cardinality='*?', composite='subject', description=_('rql expression allowing to add entities/relations of this type')) - delete_permission = ObjectRelation(('CWEType', 'CWRType'), cardinality='*?', composite='subject', + delete_permission = ObjectRelation(('CWEType', 'CWAttribute', 'CWRelation'), cardinality='*?', composite='subject', description=_('rql expression allowing to delete entities/relations of this type')) update_permission = ObjectRelation('CWEType', cardinality='*?', composite='subject', description=_('rql expression allowing to update entities of this type')) @@ -143,14 +143,14 @@ class CWConstraint(EntityType): """define a schema constraint""" - permissions = META_ETYPE_PERMS + __permissions__ = META_ETYPE_PERMS cstrtype = SubjectRelation('CWConstraintType', cardinality='1*') value = String(description=_('depends on the constraint type')) class CWConstraintType(EntityType): """define a schema constraint type""" - permissions = META_ETYPE_PERMS + __permissions__ = META_ETYPE_PERMS name = String(required=True, indexed=True, internationalizable=True, unique=True, maxsize=64) @@ -158,67 +158,84 @@ # not restricted since it has to be read when checking allowed transitions class CWGroup(EntityType): """define a CubicWeb users group""" - permissions = META_ETYPE_PERMS + __permissions__ = META_ETYPE_PERMS name = String(required=True, indexed=True, internationalizable=True, unique=True, maxsize=64) - read_permission = ObjectRelation(('CWEType', 'CWRType'), cardinality='+*', + read_permission = ObjectRelation(('CWEType', 'CWAttribute', 'CWRelation'), cardinality='+*', description=_('groups allowed to read entities/relations of this type')) - add_permission = ObjectRelation(('CWEType', 'CWRType'), + add_permission = ObjectRelation(('CWEType', 'CWAttribute', 'CWRelation'), description=_('groups allowed to add entities/relations of this type')) - delete_permission = ObjectRelation(('CWEType', 'CWRType'), + delete_permission = ObjectRelation(('CWEType', 'CWAttribute', 'CWRelation'), description=_('groups allowed to delete entities/relations of this type')) update_permission = ObjectRelation('CWEType', description=_('groups allowed to update entities of this type')) +class CWProperty(EntityType): + """used for cubicweb configuration. Once a property has been created you + can't change the key. + """ + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers', 'users',), + 'update': ('managers', 'owners',), + 'delete': ('managers', 'owners',), + } + # key is a reserved word for mysql + pkey = String(required=True, internationalizable=True, maxsize=256, + description=_('defines what\'s the property is applied for. 
' + 'You must select this first to be able to set ' + 'value')) + value = String(internationalizable=True, maxsize=256) + class relation_type(RelationType): """link a relation definition to its relation type""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS inlined = True class from_entity(RelationType): """link a relation definition to its subject entity type""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS inlined = True class to_entity(RelationType): """link a relation definition to its object entity type""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS inlined = True class constrained_by(RelationType): """constraints applying on this relation""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS class cstrtype(RelationType): """constraint factory""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS inlined = True class read_permission(RelationType): """core relation giving to a group the permission to read an entity or relation type """ - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS class add_permission(RelationType): """core relation giving to a group the permission to add an entity or relation type """ - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS class delete_permission(RelationType): """core relation giving to a group the permission to delete an entity or relation type """ - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS class update_permission(RelationType): """core relation giving to a group the permission to update an entity type """ - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS class is_(RelationType): @@ -227,7 +244,7 @@ name = 'is' # don't explicitly set composite here, this is handled anyway #composite = 'object' - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': (), 'delete': (), @@ -242,7 +259,7 @@ """ # don't explicitly set composite here, this is handled anyway #composite = 'object' - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': (), 'delete': (), @@ -253,7 +270,7 @@ class specializes(RelationType): name = 'specializes' - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers',), 'delete': ('managers',), diff -r 15d541321a8c -r 74c1597f8a82 schemas/workflow.py --- a/schemas/workflow.py Wed Jan 20 10:13:02 2010 +0100 +++ b/schemas/workflow.py Wed Jan 20 10:13:45 2010 +0100 @@ -15,7 +15,7 @@ HOOKS_RTYPE_PERMS) class Workflow(EntityType): - permissions = META_ETYPE_PERMS + __permissions__ = META_ETYPE_PERMS name = String(required=True, indexed=True, internationalizable=True, maxsize=256) @@ -34,7 +34,7 @@ class default_workflow(RelationType): """default workflow for an entity type""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS subject = 'CWEType' object = 'Workflow' @@ -47,7 +47,7 @@ """used to associate simple states to an entity type and/or to define workflows """ - permissions = META_ETYPE_PERMS + __permissions__ = META_ETYPE_PERMS name = String(required=True, indexed=True, internationalizable=True, maxsize=256, @@ -70,7 +70,7 @@ class BaseTransition(EntityType): """abstract base class for transitions""" - permissions = META_ETYPE_PERMS + __permissions__ = META_ETYPE_PERMS name = String(required=True, indexed=True, internationalizable=True, maxsize=256, @@ -140,7 +140,7 @@ class TrInfo(EntityType): """workflow history item""" # 
'add' security actually done by hooks - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests',), # XXX U has_read_permission O ? 'add': ('managers', 'users', 'guests',), 'delete': (), # XXX should we allow managers to delete TrInfo? @@ -156,20 +156,16 @@ # get actor and date time using owned_by and creation_date class from_state(RelationType): - permissions = HOOKS_RTYPE_PERMS.copy() + __permissions__ = HOOKS_RTYPE_PERMS.copy() inlined = True class to_state(RelationType): - permissions = { - 'read': ('managers', 'users', 'guests',), - 'add': ('managers',), - 'delete': (), - } + __permissions__ = HOOKS_RTYPE_PERMS.copy() inlined = True class by_transition(RelationType): # 'add' security actually done by hooks - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests',), 'add': ('managers', 'users', 'guests',), 'delete': (), @@ -178,52 +174,54 @@ class workflow_of(RelationType): """link a workflow to one or more entity type""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS class state_of(RelationType): """link a state to one or more workflow""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS + inlined = True class transition_of(RelationType): """link a transition to one or more workflow""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS + inlined = True class subworkflow(RelationType): """link a transition to one or more workflow""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS inlined = True class exit_point(RelationType): """link a transition to one or more workflow""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS class subworkflow_state(RelationType): """link a transition to one or more workflow""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS inlined = True class initial_state(RelationType): """indicate which state should be used by default when an entity using states is created """ - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS inlined = True class destination_state(RelationType): """destination state of a transition""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS inlined = True class allowed_transition(RelationType): """allowed transition from this state""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS # "abstract" relations, set by WorkflowableEntityType ########################## class custom_workflow(RelationType): """allow to set a specific workflow for an entity""" - permissions = META_RTYPE_PERMS + __permissions__ = META_RTYPE_PERMS cardinality = '?*' constraints = [RQLConstraint('S is ET, O workflow_of ET', @@ -234,7 +232,7 @@ class wf_info_for(RelationType): """link a transition information to its object""" # 'add' security actually done by hooks - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests',), 'add': ('managers', 'users', 'guests',), 'delete': (), @@ -249,7 +247,7 @@ class in_state(RelationType): """indicate the current state of an entity""" - permissions = HOOKS_RTYPE_PERMS + __permissions__ = HOOKS_RTYPE_PERMS # not inlined intentionnaly since when using ldap sources, user'state # has to be stored outside the CWUser table diff -r 15d541321a8c -r 74c1597f8a82 schemaviewer.py --- a/schemaviewer.py Wed Jan 20 10:13:02 2010 +0100 +++ b/schemaviewer.py Wed Jan 20 10:13:45 2010 +0100 @@ -9,10 +9,13 @@ _ = unicode from logilab.common.ureports import Section, Title, Table, Link, Span, Text + from 
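For readers following the permissions -> __permissions__ rename applied throughout the schemas above, here is a minimal sketch of what a schema declaration looks like with the new attribute name. The Ticket entity type and its attributes are purely illustrative; only the __permissions__ attribute name and the ERQLExpression import reflect this changeset.

from yams.buildobjs import EntityType, String
from cubicweb.schema import ERQLExpression

class Ticket(EntityType):
    """hypothetical entity type, shown only to illustrate the new-style
    __permissions__ dictionary (formerly the `permissions` attribute)"""
    __permissions__ = {
        'read':   ('managers', 'users', 'guests'),
        'add':    ('managers', 'users'),
        'update': ('managers', ERQLExpression('X owned_by U')),
        'delete': ('managers',),
        }
    name = String(required=True, maxsize=128)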
yams.schema2dot import CARD_MAP +from yams.schema import RelationDefinitionSchema I18NSTRINGS = [_('read'), _('add'), _('delete'), _('update'), _('order')] + class SchemaViewer(object): """return an ureport layout for some part of a schema""" def __init__(self, req=None, encoding=None): @@ -68,7 +71,8 @@ _ = self.req._ data = [_('attribute'), _('type'), _('default'), _('constraints')] for rschema, aschema in eschema.attribute_definitions(): - if not (rschema.has_local_role('read') or rschema.has_perm(self.req, 'read')): + rdef = eschema.rdef(rschema) + if not rdef.may_have_permission('read', self.req): continue aname = rschema.type if aname == 'eid': @@ -78,7 +82,7 @@ defaultval = eschema.default(aname) if defaultval is not None: default = self.to_string(defaultval) - elif eschema.rproperty(rschema, 'cardinality')[0] == '1': + elif rdef.cardinality[0] == '1': default = _('required field') else: default = '' @@ -119,20 +123,23 @@ t_vars = [] rels = [] first = True - for rschema, targetschemas, x in eschema.relation_definitions(): + for rschema, targetschemas, role in eschema.relation_definitions(): if rschema.type in skiptypes: continue - if not (rschema.has_local_role('read') or rschema.has_perm(self.req, 'read')): - continue rschemaurl = self.rschema_link_url(rschema) for oeschema in targetschemas: + rdef = rschema.role_rdef(eschema, oeschema, role) + if not rdef.may_have_permission('read', self.req): + continue label = rschema.type - if x == 'subject': + if role == 'subject': cards = rschema.rproperty(eschema, oeschema, 'cardinality') else: cards = rschema.rproperty(oeschema, eschema, 'cardinality') cards = cards[::-1] - label = '%s %s (%s) %s' % (CARD_MAP[cards[1]], label, display_name(self.req, label, x), CARD_MAP[cards[0]]) + label = '%s %s (%s) %s' % (CARD_MAP[cards[1]], label, + display_name(self.req, label, role), + CARD_MAP[cards[0]]) rlink = Link(rschemaurl, label) elink = Link(self.eschema_link_url(oeschema), oeschema.type) if first: @@ -180,7 +187,7 @@ rschema_objects = rschema.objects() if rschema_objects: # might be empty - properties = [p for p in rschema.rproperty_defs(rschema_objects[0]) + properties = [p for p in RelationDefinitionSchema.rproperty_defs(rschema_objects[0]) if not p in ('cardinality', 'composite', 'eid')] else: properties = [] @@ -196,8 +203,9 @@ done.add((objtype, subjtype)) data.append(Link(self.eschema_link_url(schema[subjtype]), subjtype)) data.append(Link(self.eschema_link_url(schema[objtype]), objtype)) + rdef = rschema.rdef(subjtype, objtype) for prop in properties: - val = rschema.rproperty(subjtype, objtype, prop) + val = getattr(rdef, prop) if val is None: val = '' elif isinstance(val, (list, tuple)): @@ -209,8 +217,8 @@ data.append(Text(val)) table = Table(cols=cols, rheaders=1, children=data, klass='listing') layout.append(Section(children=(table,), klass='relationDefinition')) - if not self.req.cnx.anonymous_connection: - layout.append(self.format_acls(rschema, ('read', 'add', 'delete'))) + #if self.req.user.matching_groups('managers'): + # layout.append(self.format_acls(rschema, ('read', 'add', 'delete'))) layout.append(Section(children='', klass='clear')) return layout diff -r 15d541321a8c -r 74c1597f8a82 selectors.py --- a/selectors.py Wed Jan 20 10:13:02 2010 +0100 +++ b/selectors.py Wed Jan 20 10:13:45 2010 +0100 @@ -45,8 +45,8 @@ import logging from warnings import warn, filterwarnings +from logilab.common.deprecation import class_renamed from logilab.common.compat import all, any -from logilab.common.deprecation import deprecated from 
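As a quick orientation for the schemaviewer changes above: permission checks and per-definition properties now go through the relation-definition object instead of rschema.rproperty(). A condensed sketch of the calls involved, assuming eschema, rschema and req are in scope as in the viewer code:

# relation definition object for one attribute of eschema
rdef = eschema.rdef(rschema)
if rdef.may_have_permission('read', req):
    # definition properties are plain attributes on the rdef
    required = rdef.cardinality[0] == '1'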
logilab.common.interface import implements as implements_iface from yams import BASE_TYPES @@ -55,6 +55,7 @@ role, typed_eid) # even if not used, let yes here so it's importable through this module from cubicweb.appobject import Selector, objectify_selector, yes +from cubicweb.vregistry import class_regid from cubicweb.cwconfig import CubicWebConfiguration from cubicweb.schema import split_expression @@ -75,11 +76,11 @@ else: selname = selector.__name__ vobj = cls - oid = vobj.id + oid = class_regid(vobj) ret = selector(cls, *args, **kwargs) if TRACED_OIDS == 'all' or oid in TRACED_OIDS: #SELECTOR_LOGGER.warning('selector %s returned %s for %s', selname, ret, cls) - print '%s -> %s for %s' % (selname, ret, vobj) + print '%s -> %s for %s(%s)' % (selname, ret, vobj, vobj.__regid__) return ret traced.__name__ = selector.__name__ return traced @@ -113,13 +114,13 @@ return traceback is None -def score_interface(cls_or_inst, cls, iface): +def score_interface(etypesreg, cls_or_inst, cls, iface): """Return XXX if the give object (maybe an instance or class) implements the interface. """ if getattr(iface, '__registry__', None) == 'etypes': # adjust score if the interface is an entity class - parents = cls_or_inst.parent_classes() + parents = etypesreg.parent_classes(cls_or_inst.__regid__) if iface is cls: return len(parents) + 4 if iface is parents[-1]: # Any @@ -158,17 +159,18 @@ return '%s(%s)' % (self.__class__.__name__, ','.join(str(s) for s in self.expected_ifaces)) - def score_interfaces(self, cls_or_inst, cls): + def score_interfaces(self, req, cls_or_inst, cls): score = 0 - vreg, eschema = cls_or_inst.vreg, cls_or_inst.e_schema + etypesreg = req.vreg['etypes'] + eschema = cls_or_inst.e_schema for iface in self.expected_ifaces: if isinstance(iface, basestring): # entity type try: - iface = vreg['etypes'].etype_class(iface) + iface = etypesreg.etype_class(iface) except KeyError: continue # entity type not in the schema - score += score_interface(cls_or_inst, cls, iface) + score += score_interface(etypesreg, cls_or_inst, cls, iface) return score @@ -176,7 +178,9 @@ """abstract class for selectors working on the entity classes of the result set. 
Its __call__ method has the following behaviour: - * if row is specified, return the score returned by the score_class method + * if 'entity' find in kwargs, return the score returned by the score_class + method for this entity's class + * elif row is specified, return the score returned by the score_class method called with the entity class found in the specified cell * else return the sum of score returned by the score_class method for each entity type found in the specified column, unless: @@ -191,6 +195,8 @@ @lltrace def __call__(self, cls, req, rset=None, row=None, col=0, **kwargs): + if kwargs.get('entity'): + return self.score_class(kwargs['entity'].__class__, req) if not rset: return 0 score = 0 @@ -216,7 +222,7 @@ def score(self, cls, req, etype): if etype in BASE_TYPES: return 0 - return self.score_class(cls.vreg['etypes'].etype_class(etype), req) + return self.score_class(req.vreg['etypes'].etype_class(etype), req) def score_class(self, eclass, req): raise NotImplementedError() @@ -430,7 +436,8 @@ * context (`basestring`) is matching the context property value for the given cls """ - propval = req.property_value('%s.%s.context' % (cls.__registry__, cls.id)) + propval = req.property_value('%s.%s.context' % (cls.__registry__, + cls.__regid__)) if not propval: propval = cls.context if context is not None and propval and context != propval: @@ -545,7 +552,7 @@ """ @lltrace def __call__(self, cls, req, rset=None, row=None, col=0, view=None, **kwargs): - if view is None or not view.id in self.expected: + if view is None or not view.__regid__ in self.expected: return 0 return 1 @@ -563,7 +570,7 @@ def __call__(self, cls, req, **kwargs): try: - cls.vreg[self.registry].select(self.oid, req, **kwargs) + req.vreg[self.registry].select(self.oid, req, **kwargs) return 1 except NoSelectableObject: return 0 @@ -587,7 +594,7 @@ proximity so the most specific object'll be selected """ def score_class(self, eclass, req): - return self.score_interfaces(eclass, eclass) + return self.score_interfaces(req, eclass, eclass) class specified_etype_implements(implements): @@ -617,11 +624,11 @@ # only check this is a known type if etype comes from req.form, # else we want the error to propagate try: - etype = cls.vreg.case_insensitive_etypes[etype.lower()] + etype = req.vreg.case_insensitive_etypes[etype.lower()] req.form['etype'] = etype except KeyError: return 0 - score = self.score_class(cls.vreg['etypes'].etype_class(etype), req) + score = self.score_class(req.vreg['etypes'].etype_class(etype), req) if score: eschema = req.vreg.schema.eschema(etype) if eschema.has_local_role('add') or eschema.has_perm(req, 'add'): @@ -629,25 +636,6 @@ return 0 -class entity_implements(ImplementsMixIn, EntitySelector): - """accept if entity instances found in the result set implements at least one - of the interfaces given as argument. Returned score is the number of - implemented interfaces. - - See `EntitySelector` documentation for behaviour when row is not specified. - - :param *expected_ifaces: expected interfaces. An interface may be a class - or an entity type (e.g. `basestring`) in which case - the associated class will be searched in the - registry (at selection time) - - note: when interface is an entity class, the score will reflect class - proximity so the most specific object'll be selected - """ - def score_entity(self, entity): - return self.score_interfaces(entity, entity.__class__) - - class relation_possible(EClassSelector): """accept if entity class found in the result set support the relation. 
@@ -670,19 +658,6 @@ self.target_etype = target_etype self.action = action - @lltrace - def __call__(self, cls, req, *args, **kwargs): - rschema = cls.schema.rschema(self.rtype) - if not (rschema.has_perm(req, self.action) - or rschema.has_local_role(self.action)): - return 0 - if self.action != 'read': - if not (rschema.has_perm(req, 'read') - or rschema.has_local_role('read')): - return 0 - score = super(relation_possible, self).__call__(cls, req, *args, **kwargs) - return score - def score_class(self, eclass, req): eschema = eclass.e_schema try: @@ -694,12 +669,13 @@ return 0 if self.target_etype is not None: try: - if self.role == 'subject': - return int(self.target_etype in rschema.objects(eschema)) - else: - return int(self.target_etype in rschema.subjects(eschema)) + rdef = rschema.role_rdef(eschema, self.target_etype, self.role) + if not rdef.may_have_permission(self.action, req): + return 0 except KeyError: return 0 + else: + return rschema.may_have_permission(self.action, req, eschema, self.role) return 1 @@ -742,17 +718,21 @@ 'object', default to 'subject'. """ - def __init__(self, rtype, role='subject', once_is_enough=False): + def __init__(self, rtype, role='subject', target_etype=None, + once_is_enough=False): super(may_add_relation, self).__init__(once_is_enough) self.rtype = rtype self.role = role + self.target_etype = target_etype def score_entity(self, entity): - rschema = entity.schema.rschema(self.rtype) + rschema = entity._cw.vreg.schema.rschema(self.rtype) + if self.target_etype is not None: + rschema = rschema.role_rdef(entity.e_schema, self.target_etype, self.role) if self.role == 'subject': - if not rschema.has_perm(entity.req, 'add', fromeid=entity.eid): + if not rschema.has_perm(entity._cw, 'add', fromeid=entity.eid): return 0 - elif not rschema.has_perm(entity.req, 'add', toeid=entity.eid): + elif not rschema.has_perm(entity._cw, 'add', toeid=entity.eid): return 0 return 1 @@ -774,11 +754,12 @@ be returned """ def __init__(self, once_is_enough=False): - super(partial_may_add_relation, self).__init__(None, None, once_is_enough) + super(partial_may_add_relation, self).__init__(None, once_is_enough=once_is_enough) def complete(self, cls): self.rtype = cls.rtype self.role = role(cls) + self.target_etype = getattr(cls, 'etype', None) class has_related_entities(EntitySelector): @@ -803,7 +784,7 @@ def score_entity(self, entity): relpossel = relation_possible(self.rtype, self.role, self.target_etype) - if not relpossel.score_class(entity.__class__, entity.req): + if not relpossel.score_class(entity.__class__, entity._cw): return 0 rset = entity.related(self.rtype, self.role) if self.target_etype: @@ -865,7 +846,7 @@ if row is None: score = 0 need_local_check = [] - geteschema = cls.schema.eschema + geteschema = req.vreg.schema.eschema for etype in rset.column_types(0): if etype in BASE_TYPES: return 0 @@ -903,7 +884,7 @@ See `EClassSelector` documentation for behaviour when row is not specified. """ def score(self, cls, req, etype): - eschema = cls.schema.eschema(etype) + eschema = req.vreg.schema.eschema(etype) if not (eschema.final or eschema.is_subobject(strict=True)) \ and eschema.has_perm(req, 'add'): return 1 @@ -943,23 +924,6 @@ return u'' % (self.rql, id(self)) -class but_etype(EntitySelector): - """accept if the given entity types are not found in the result set. - - See `EntitySelector` documentation for behaviour when row is not specified. 
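To illustrate how the refactored selectors are meant to be combined (note the switch from cls.vreg to req.vreg and from .id to __regid__), here is a hypothetical view using implements and relation_possible; the 'Ticket' type and the view identifier are invented.

from cubicweb.view import EntityView
from cubicweb.selectors import implements, relation_possible

class TicketStateView(EntityView):
    __regid__ = 'ticket.state'
    __select__ = (EntityView.__select__ & implements('Ticket')
                  & relation_possible('in_state'))

    def cell_call(self, row, col):
        entity = self.cw_rset.get_entity(row, col)
        self.w(entity.dc_title())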
- - :param *etypes: entity types (`basestring`) which should be refused - """ - def __init__(self, *etypes): - super(but_etype, self).__init__() - self.but_etypes = etypes - - def score(self, req, rset, row, col): - if rset.description[row][col] in self.but_etypes: - return 0 - return 1 - - class score_entity(EntitySelector): """accept if some arbitrary function return a positive score for an entity found in the result set. @@ -980,204 +944,24 @@ return 1 self.score_entity = intscore - -# XXX DEPRECATED ############################################################## -# XXX remove when deprecated functions are removed -filterwarnings('ignore', - category=DeprecationWarning, - module='cubicweb.selectors') -from cubicweb.vregistry import chainall +## deprecated stuff ############################################################ -yes_selector = deprecated()(yes) -norset_selector = deprecated()(none_rset) -rset_selector = deprecated()(any_rset) -anyrset_selector = deprecated()(nonempty_rset) -emptyrset_selector = deprecated()(empty_rset) -onelinerset_selector = deprecated()(one_line_rset) -twolinerset_selector = deprecated()(two_lines_rset) -twocolrset_selector = deprecated()(two_cols_rset) -largerset_selector = deprecated()(paginated_rset) -sortedrset_selector = deprecated()(sorted_rset) -oneetyperset_selector = deprecated()(one_etype_rset) -multitype_selector = deprecated()(two_etypes_rset) -anonymous_selector = deprecated()(anonymous_user) -not_anonymous_selector = deprecated()(authenticated_user) -primaryview_selector = deprecated()(primary_view) -contextprop_selector = deprecated()(match_context_prop) +entity_implements = class_renamed('entity_implements', implements) -@deprecated('use non_final_entity instead of %s') -def nfentity_selector(cls, req, rset=None, row=None, col=0, **kwargs): - return non_final_entity()(cls, req, rset, row, col) - -@deprecated('use implements instead of %s') -def implement_interface(cls, req, rset=None, row=None, col=0, **kwargs): - return implements(*cls.accepts_interfaces)(cls, req, rset, row, col) -_interface_selector = deprecated()(implement_interface) -interface_selector = deprecated()(implement_interface) +class _but_etype(EntitySelector): + """accept if the given entity types are not found in the result set. -@deprecated('use specified_etype_implements instead of %s') -def accept_etype(cls, req, *args, **kwargs): - """check etype presence in request form *and* accepts conformance""" - return specified_etype_implements(*cls.accepts)(cls, req, *args) -etype_form_selector = accept_etype - -@deprecated('use match_search_state instead of %s') -def searchstate_selector(cls, req, rset=None, row=None, col=0, **kwargs): - return match_search_state(cls.search_states)(cls, req, rset, row, col) - -@deprecated('use match_user_groups instead of %s') -def match_user_group(cls, req, rset=None, row=None, col=0, **kwargs): - return match_user_groups(*cls.require_groups)(cls, req, rset, row, col, **kwargs) -in_group_selector = match_user_group + See `EntitySelector` documentation for behaviour when row is not specified. 
-@deprecated('use relation_possible instead of %s') -def has_relation(cls, req, rset=None, row=None, col=0, **kwargs): - return relation_possible(cls.rtype, role(cls), cls.etype, - getattr(cls, 'require_permission', 'read'))(cls, req, rset, row, col, **kwargs) - -@deprecated('use relation_possible instead of %s') -def one_has_relation(cls, req, rset=None, row=None, col=0, **kwargs): - return relation_possible(cls.rtype, role(cls), cls.etype, - getattr(cls, 'require_permission', 'read', - once_is_enough=True))(cls, req, rset, row, col, **kwargs) - -@deprecated('use implements instead of %s') -def accept_rset(cls, req, rset=None, row=None, col=0, **kwargs): - """simply delegate to cls.accept_rset method""" - return implements(*cls.accepts)(cls, req, rset, row=row, col=col) -accept_rset_selector = accept_rset - -accept = chainall(non_final_entity(), accept_rset, name='accept') -accept = deprecated('use implements selector')(accept) -accept_selector = deprecated()(accept) - -accept_one = deprecated()(chainall(one_line_rset, accept, - name='accept_one')) -accept_one_selector = deprecated()(accept_one) + :param *etypes: entity types (`basestring`) which should be refused + """ + def __init__(self, *etypes): + super(_but_etype, self).__init__() + self.but_etypes = etypes - -def _rql_condition(cls, req, rset=None, row=None, col=0, **kwargs): - if cls.condition: - return rql_condition(cls.condition)(cls, req, rset, row, col) - return 1 -_rqlcondition_selector = deprecated()(_rql_condition) - -rqlcondition_selector = deprecated()(chainall(non_final_entity(), one_line_rset, _rql_condition, - name='rql_condition')) - -@deprecated('use but_etype instead of %s') -def but_etype_selector(cls, req, rset=None, row=None, col=0, **kwargs): - return but_etype(cls.etype)(cls, req, rset, row, col) - -@lltrace -def etype_rtype_selector(cls, req, rset=None, row=None, col=0, **kwargs): - schema = cls.schema - perm = getattr(cls, 'require_permission', 'read') - if hasattr(cls, 'etype'): - eschema = schema.eschema(cls.etype) - if not (eschema.has_perm(req, perm) or eschema.has_local_role(perm)): - return 0 - if hasattr(cls, 'rtype'): - rschema = schema.rschema(cls.rtype) - if not (rschema.has_perm(req, perm) or rschema.has_local_role(perm)): + def score(self, req, rset, row, col): + if rset.description[row][col] in self.but_etypes: return 0 - return 1 -etype_rtype_selector = deprecated()(etype_rtype_selector) - -#req_form_params_selector = deprecated()(match_form_params) # form_params -#kwargs_selector = deprecated()(match_kwargs) # expected_kwargs - -# compound selectors ########################################################## - -searchstate_accept = chainall(nonempty_rset(), accept, - name='searchstate_accept') -searchstate_accept_selector = deprecated()(searchstate_accept) - -searchstate_accept_one = chainall(one_line_rset, accept, _rql_condition, - name='searchstate_accept_one') -searchstate_accept_one_selector = deprecated()(searchstate_accept_one) - -searchstate_accept = deprecated()(searchstate_accept) -searchstate_accept_one = deprecated()(searchstate_accept_one) - -# end of deprecation section ################################################## - -def unbind_method(selector): - def new_selector(registered): - # get the unbound method - if hasattr(registered, 'im_func'): - registered = registered.im_func - # don't rebind since it will be done automatically during - # the assignment, inside the destination class body - return selector(registered) - new_selector.__name__ = selector.__name__ - return 
new_selector - - -def deprecate(registered, msg): - # get the unbound method - if hasattr(registered, 'im_func'): - registered = registered.im_func - def _deprecate(cls, vreg): - warn(msg, DeprecationWarning) - return registered(cls, vreg) - return _deprecate + return 1 -@unbind_method -def require_group_compat(registered): - def plug_selector(cls, vreg): - cls = registered(cls, vreg) - if getattr(cls, 'require_groups', None): - warn('use "match_user_groups(group1, group2)" instead of using require_groups', - DeprecationWarning) - cls.__select__ &= match_user_groups(cls.require_groups) - return cls - return plug_selector - -@unbind_method -def accepts_compat(registered): - def plug_selector(cls, vreg): - cls = registered(cls, vreg) - if getattr(cls, 'accepts', None): - warn('use "implements("EntityType", IFace)" instead of using accepts on %s' - % cls, - DeprecationWarning) - cls.__select__ &= implements(*cls.accepts) - return cls - return plug_selector - -@unbind_method -def accepts_etype_compat(registered): - def plug_selector(cls, vreg): - cls = registered(cls, vreg) - if getattr(cls, 'accepts', None): - warn('use "specified_etype_implements("EntityType", IFace)" instead of using accepts', - DeprecationWarning) - cls.__select__ &= specified_etype_implements(*cls.accepts) - return cls - return plug_selector - -@unbind_method -def condition_compat(registered): - def plug_selector(cls, vreg): - cls = registered(cls, vreg) - if getattr(cls, 'condition', None): - warn('use "use rql_condition(expression)" instead of using condition', - DeprecationWarning) - cls.__select__ &= rql_condition(cls.condition) - return cls - return plug_selector - -@unbind_method -def has_relation_compat(registered): - def plug_selector(cls, vreg): - cls = registered(cls, vreg) - if getattr(cls, 'etype', None): - warn('use relation_possible selector instead of using etype_rtype', - DeprecationWarning) - cls.__select__ &= relation_possible(cls.rtype, role(cls), - getattr(cls, 'etype', None), - action=getattr(cls, 'require_permission', 'read')) - return cls - return plug_selector - +but_etype = class_renamed('but_etype', _but_etype, 'use ~implements(*etypes) instead') diff -r 15d541321a8c -r 74c1597f8a82 server/__init__.py --- a/server/__init__.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/__init__.py Wed Jan 20 10:13:45 2010 +0100 @@ -30,7 +30,7 @@ DBG_REPO = 4 # repository events DBG_MS = 8 # multi-sources DBG_MORE = 16 # more verbosity - +DBG_ALL = 1 + 2 + 4 + 8 + 16 # current debug mode DEBUG = 0 @@ -167,7 +167,7 @@ session.commit() # reloging using the admin user config._cubes = None # avoid assertion error - repo, cnx = in_memory_cnx(config, login, pwd) + repo, cnx = in_memory_cnx(config, login, password=pwd) # trigger vreg initialisation of entity classes config.cubicweb_appobject_path = set(('entities',)) config.cube_appobject_path = set(('entities',)) @@ -203,6 +203,10 @@ def initialize_schema(config, schema, mhandler, event='create'): from cubicweb.server.schemaserial import serialize_schema + # deactivate every hooks but those responsible to set metadata + # so, NO INTEGRITY CHECKS are done, to have quicker db creation + oldmode = config.set_hooks_mode(config.DENY_ALL) + changes = config.enable_hook_category('metadata') paths = [p for p in config.cubes_path() + [config.apphome] if exists(join(p, 'migration'))] # execute cubicweb's pre script @@ -218,6 +222,10 @@ # execute cubes'post script if any for path in reversed(paths): mhandler.exec_event_script('post%s' % event, path) + # restore hooks config + if 
changes: + config.disable_hook_category(changes) + config.set_hooks_mode(oldmode) # sqlite'stored procedures have to be registered at connexion opening time diff -r 15d541321a8c -r 74c1597f8a82 server/checkintegrity.py --- a/server/checkintegrity.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/checkintegrity.py Wed Jan 20 10:13:45 2010 +0100 @@ -68,8 +68,6 @@ """reindex all entities in the repository""" # deactivate modification_date hook since we don't want them # to be updated due to the reindexation - from cubicweb.server.hooks import (setmtime_before_update_entity, - uniquecstrcheck_before_modification) from cubicweb.server.repository import FTIndexEntityOp repo = session.repo cursor = session.pool['system'] @@ -80,10 +78,8 @@ # XXX indexer.init_fti(cursor) once index 0.7 is out indexer.init_extensions(cursor) cursor.execute(indexer.sql_init_fti()) - repo.hm.unregister_hook(setmtime_before_update_entity, - 'before_update_entity', '') - repo.hm.unregister_hook(uniquecstrcheck_before_modification, - 'before_update_entity', '') + repo.config.disabled_hooks_categories.add('metadata') + repo.config.disabled_hooks_categories.add('integrity') repo.do_fti = True # ensure full-text indexation is activated etypes = set() for eschema in schema.entities(): diff -r 15d541321a8c -r 74c1597f8a82 server/hook.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/hook.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,501 @@ +"""Hooks management + +This module defined the `Hook` class and registry and a set of abstract classes +for operations. + + +Hooks are called before / after any individual update of entities / relations +in the repository and on special events such as server startup or shutdown. + + +Operations may be registered by hooks during a transaction, which will be +fired when the pool is commited or rollbacked. + + +Entity hooks (eg before_add_entity, after_add_entity, before_update_entity, +after_update_entity, before_delete_entity, after_delete_entity) all have an +`entity` attribute + +Relation (eg before_add_relation, after_add_relation, before_delete_relation, +after_delete_relation) all have `eidfrom`, `rtype`, `eidto` attributes. + +Server start/stop hooks (eg server_startup, server_shutdown) have a `repo` +attribute, but *their `_cw` attribute is None*. + +Backup/restore hooks (eg server_backup, server_restore) have a `repo` and a +`timestamp` attributes, but *their `_cw` attribute is None*. + +Session hooks (eg session_open, session_close) have no special attribute. + + +:organization: Logilab +:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
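The hook (de)activation dance used by initialize_schema above is worth spelling out as a reusable pattern for any bulk operation; do_bulk_import below is a hypothetical function, config and session are the usual server-side objects.

oldmode = config.set_hooks_mode(config.DENY_ALL)   # no hook category runs...
changes = config.enable_hook_category('metadata')  # ...except metadata hooks
try:
    do_bulk_import(session)   # hypothetical heavy import, no integrity checks
finally:
    if changes:
        config.disable_hook_category(changes)
    config.set_hooks_mode(oldmode)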
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from warnings import warn +from logging import getLogger +from itertools import chain + +from logilab.common.decorators import classproperty +from logilab.common.deprecation import deprecated +from logilab.common.logging_ext import set_log_methods + +from cubicweb.cwvreg import CWRegistry, VRegistry +from cubicweb.selectors import (objectify_selector, lltrace, match_search_state, + implements) +from cubicweb.appobject import AppObject + + +ENTITIES_HOOKS = set(('before_add_entity', 'after_add_entity', + 'before_update_entity', 'after_update_entity', + 'before_delete_entity', 'after_delete_entity')) +RELATIONS_HOOKS = set(('before_add_relation', 'after_add_relation' , + 'before_delete_relation','after_delete_relation')) +SYSTEM_HOOKS = set(('server_backup', 'server_restore', + 'server_startup', 'server_shutdown', + 'session_open', 'session_close')) +ALL_HOOKS = ENTITIES_HOOKS | RELATIONS_HOOKS | SYSTEM_HOOKS + + +class HooksRegistry(CWRegistry): + + def register(self, obj, **kwargs): + try: + iter(obj.events) + except AttributeError: + raise + except: + raise Exception('bad .events attribute %s on %s.%s' % ( + obj.events, obj.__module__, obj.__name__)) + for event in obj.events: + if event not in ALL_HOOKS: + raise Exception('bad event %s on %s.%s' % ( + event, obj.__module__, obj.__name__)) + super(HooksRegistry, self).register(obj, **kwargs) + + def call_hooks(self, event, req=None, **kwargs): + kwargs['event'] = event + for hook in sorted(self.possible_objects(req, **kwargs), key=lambda x: x.order): + if hook.enabled: + hook() + else: + warn('[3.6] %s: enabled is deprecated' % self.__class__) + +VRegistry.REGISTRY_FACTORY['hooks'] = HooksRegistry + + +def entity_oldnewvalue(entity, attr): + """returns the couple (old attr value, new attr value) + NOTE: will only work in a before_update_entity hook + """ + # get new value and remove from local dict to force a db query to + # fetch old value + newvalue = entity.pop(attr, None) + oldvalue = getattr(entity, attr) + if newvalue is not None: + entity[attr] = newvalue + return oldvalue, newvalue + + +# some hook specific selectors ################################################# + +@objectify_selector +@lltrace +def match_event(cls, req, **kwargs): + if kwargs.get('event') in cls.events: + return 1 + return 0 + +@objectify_selector +@lltrace +def enabled_category(cls, req, **kwargs): + if req is None: + # server startup / shutdown event + config = kwargs['repo'].config + else: + config = req.vreg.config + return config.is_hook_activated(cls) + +@objectify_selector +@lltrace +def regular_session(cls, req, **kwargs): + if req is None or req.is_super_session: + return 0 + return 1 + + +class rechain(object): + def __init__(self, *iterators): + self.iterators = iterators + def __iter__(self): + return iter(chain(*self.iterators)) + + +class match_rtype(match_search_state): + """accept if parameters specified as initializer arguments are specified + in named arguments given to the selector + + :param *expected: parameters (eg `basestring`) which are expected to be + found in named arguments (kwargs) + """ + def __init__(self, *expected): + self.expected = expected + # if len(expected) == 1: + # try: + # iter(expected[0]) + # self.expected = expected[0] + # except TypeError: + # pass + + @lltrace + def __call__(self, cls, req, *args, **kwargs): + return 
kwargs.get('rtype') in self.expected + +class match_rtype_sets(match_search_state): + """accept if parameters specified as initializer arguments are specified + in named arguments given to the selector + """ + + def __init__(self, *expected): + self.expected = expected + + @lltrace + def __call__(self, cls, req, *args, **kwargs): + for rel_set in self.expected: + if kwargs.get('rtype') in rel_set: + return 1 + return 0 + +# base class for hook ########################################################## + +class Hook(AppObject): + __registry__ = 'hooks' + __select__ = match_event() & enabled_category() + # set this in derivated classes + events = None + category = None + order = 0 + # XXX deprecated + enabled = True + + @classproperty + def __regid__(cls): + warn('[3.6] %s.%s: please specify an id for your hook' + % (cls.__module__, cls.__name__), DeprecationWarning) + return str(id(cls)) + + @classmethod + def __registered__(cls, vreg): + super(Hook, cls).__registered__(vreg) + if getattr(cls, 'accepts', None): + warn('[3.6] %s.%s: accepts is deprecated, define proper __select__' + % (cls.__module__, cls.__name__), DeprecationWarning) + rtypes = [] + for ertype in cls.accepts: + if ertype.islower(): + rtypes.append(ertype) + else: + cls.__select__ = cls.__select__ & implements(ertype) + if rtypes: + cls.__select__ = cls.__select__ & match_rtype(*rtypes) + return cls + + known_args = set(('entity', 'rtype', 'eidfrom', 'eidto', 'repo', 'timestamp')) + def __init__(self, req, event, **kwargs): + for arg in self.known_args: + if arg in kwargs: + setattr(self, arg, kwargs.pop(arg)) + super(Hook, self).__init__(req, **kwargs) + self.event = event + + def __call__(self): + if hasattr(self, 'call'): + cls = self.__class__ + warn('[3.6] %s.%s: call is deprecated, implements __call__' + % (cls.__module__, cls.__name__), DeprecationWarning) + if self.event.endswith('_relation'): + self.call(self._cw, self.eidfrom, self.rtype, self.eidto) + elif 'delete' in self.event: + self.call(self._cw, self.entity.eid) + elif self.event.startswith('server_'): + self.call(self.repo) + elif self.event.startswith('session_'): + self.call(self._cw) + else: + self.call(self._cw, self.entity) + +set_log_methods(Hook, getLogger('cubicweb.hook')) + + +# base classes for relation propagation ######################################## + +class PropagateSubjectRelationHook(Hook): + """propagate permissions and nosy list when new entity are added""" + events = ('after_add_relation',) + + # to set in concrete class + main_rtype = None + subject_relations = None + object_relations = None + + def __call__(self): + for eid in (self.eidfrom, self.eidto): + etype = self._cw.describe(eid)[0] + if not self._cw.vreg.schema.eschema(etype).has_subject_relation(self.main_rtype): + return + if self.rtype in self.subject_relations: + meid, seid = self.eidfrom, self.eidto + else: + assert self.rtype in self.object_relations + meid, seid = self.eidto, self.eidfrom + self._cw.unsafe_execute( + 'SET E %s P WHERE X %s P, X eid %%(x)s, E eid %%(e)s, NOT E %s P'\ + % (self.main_rtype, self.main_rtype, self.main_rtype), + {'x': meid, 'e': seid}, ('x', 'e')) + + +class PropagateSubjectRelationAddHook(Hook): + """propagate on existing entities when a permission or nosy list is added""" + events = ('after_add_relation',) + + # to set in concrete class + main_rtype = None + subject_relations = None + object_relations = None + + def __call__(self): + eschema = self._cw.vreg.schema.eschema(self._cw.describe(self.eidfrom)[0]) + execute = 
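A concrete hook written against the Hook base class defined above would look roughly like this; the 'Ticket' entity type, attribute and regid are invented, while the events tuple, category attribute and __select__ composition are the ones this module expects.

from cubicweb.server import hook
from cubicweb.selectors import implements

class SetDefaultTitleHook(hook.Hook):
    __regid__ = 'myapp.set_default_title'
    __select__ = hook.Hook.__select__ & implements('Ticket')
    events = ('before_add_entity',)
    category = 'metadata'

    def __call__(self):
        # entities behave like dicts for attribute edition at hook time
        self.entity.setdefault('title', u'untitled')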
self._cw.unsafe_execute + for rel in self.subject_relations: + if rel in eschema.subjrels: + execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' + 'X %s R, NOT R %s P' % (self.rtype, rel, self.rtype), + {'x': self.eidfrom, 'p': self.eidto}, 'x') + for rel in self.object_relations: + if rel in eschema.objrels: + execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' + 'R %s X, NOT R %s P' % (self.rtype, rel, self.rtype), + {'x': self.eidfrom, 'p': self.eidto}, 'x') + + +class PropagateSubjectRelationDelHook(Hook): + """propagate on existing entities when a permission is deleted""" + events = ('after_delete_relation',) + + # to set in concrete class + main_rtype = None + subject_relations = None + object_relations = None + + def __call__(self): + eschema = self._cw.vreg.schema.eschema(self._cw.describe(self.eidfrom)[0]) + execute = self._cw.unsafe_execute + for rel in self.subject_relations: + if rel in eschema.subjrels: + execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' + 'X %s R' % (self.rtype, rel), + {'x': self.eidfrom, 'p': self.eidto}, 'x') + for rel in self.object_relations: + if rel in eschema.objrels: + execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' + 'R %s X' % (self.rtype, rel), + {'x': self.eidfrom, 'p': self.eidto}, 'x') + + +# abstract classes for operation ############################################### + +class Operation(object): + """an operation is triggered on connections pool events related to + commit / rollback transations. Possible events are: + + precommit: + the pool is preparing to commit. You shouldn't do anything things which + has to be reverted if the commit fail at this point, but you can freely + do any heavy computation or raise an exception if the commit can't go. + You can add some new operation during this phase but their precommit + event won't be triggered + + commit: + the pool is preparing to commit. 
You should avoid to do to expensive + stuff or something that may cause an exception in this event + + revertcommit: + if an operation failed while commited, this event is triggered for + all operations which had their commit event already to let them + revert things (including the operation which made fail the commit) + + rollback: + the transaction has been either rollbacked either + * intentionaly + * a precommit event failed, all operations are rollbacked + * a commit event failed, all operations which are not been triggered for + commit are rollbacked + + order of operations may be important, and is controlled according to: + * operation's class + """ + + def __init__(self, session, **kwargs): + self.session = session + self.__dict__.update(kwargs) + self.register(session) + # execution information + self.processed = None # 'precommit', 'commit' + self.failed = False + + def register(self, session): + session.add_operation(self, self.insert_index()) + + def insert_index(self): + """return the index of the lastest instance which is not a + LateOperation instance + """ + # faster by inspecting operation in reverse order for heavy transactions + i = None + for i, op in enumerate(reversed(self.session.pending_operations)): + if isinstance(op, (LateOperation, SingleLastOperation)): + continue + return -i or None + if i is None: + return None + return -(i + 1) + + def handle_event(self, event): + """delegate event handling to the opertaion""" + getattr(self, event)() + + def precommit_event(self): + """the observed connections pool is preparing a commit""" + + def revertprecommit_event(self): + """an error went when pre-commiting this operation or a later one + + should revert pre-commit's changes but take care, they may have not + been all considered if it's this operation which failed + """ + + def commit_event(self): + """the observed connections pool is commiting""" + + def revertcommit_event(self): + """an error went when commiting this operation or a later one + + should revert commit's changes but take care, they may have not + been all considered if it's this operation which failed + """ + + def rollback_event(self): + """the observed connections pool has been rollbacked + + do nothing by default, the operation will just be removed from the pool + operation list + """ + + def postcommit_event(self): + """the observed connections pool has committed""" + + @property + @deprecated('[3.6] use self.session.user') + def user(self): + return self.session.user + + @property + @deprecated('[3.6] use self.session.repo') + def repo(self): + return self.session.repo + + @property + @deprecated('[3.6] use self.session.vreg.schema') + def schema(self): + return self.session.repo.schema + + @property + @deprecated('[3.6] use self.session.vreg.config') + def config(self): + return self.session.repo.config + +set_log_methods(Operation, getLogger('cubicweb.session')) + + +class LateOperation(Operation): + """special operation which should be called after all possible (ie non late) + operations + """ + def insert_index(self): + """return the index of the lastest instance which is not a + SingleLastOperation instance + """ + # faster by inspecting operation in reverse order for heavy transactions + i = None + for i, op in enumerate(reversed(self.session.pending_operations)): + if isinstance(op, SingleLastOperation): + continue + return -i or None + if i is None: + return None + return -(i + 1) + + +class SingleOperation(Operation): + """special operation which should be called once""" + def 
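To make the Operation protocol concrete, here is a minimal precommit operation as a hook might register it. The RQL, the 'touched' attribute and the parent_eid keyword are illustrative; keyword arguments passed to Operation become instance attributes.

from cubicweb.server.hook import Operation

class TouchParentOp(Operation):
    """mark the parent entity once the transaction pre-commits (sketch)"""

    def precommit_event(self):
        self.session.unsafe_execute(
            'SET P touched TRUE WHERE P eid %(p)s',
            {'p': self.parent_eid}, 'p')

# typically registered from a hook's __call__:
#     TouchParentOp(self._cw, parent_eid=self.eidfrom)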
register(self, session): + """override register to handle cases where this operation has already + been added + """ + operations = session.pending_operations + index = self.equivalent_index(operations) + if index is not None: + equivalent = operations.pop(index) + else: + equivalent = None + session.add_operation(self, self.insert_index()) + return equivalent + + def equivalent_index(self, operations): + """return the index of the equivalent operation if any""" + for i, op in enumerate(reversed(operations)): + if op.__class__ is self.__class__: + return -(i+1) + return None + + +class SingleLastOperation(SingleOperation): + """special operation which should be called once and after all other + operations + """ + def insert_index(self): + return None + + +class SendMailOp(SingleLastOperation): + def __init__(self, session, msg=None, recipients=None, **kwargs): + # may not specify msg yet, as + # `cubicweb.sobjects.supervision.SupervisionMailOp` + if msg is not None: + assert recipients + self.to_send = [(msg, recipients)] + else: + assert recipients is None + self.to_send = [] + super(SendMailOp, self).__init__(session, **kwargs) + + def register(self, session): + previous = super(SendMailOp, self).register(session) + if previous: + self.to_send = previous.to_send + self.to_send + + def commit_event(self): + self.session.repo.threaded_task(self.sendmails) + + def sendmails(self): + self.session.vreg.config.sendmails(self.to_send) + + +class RQLPrecommitOperation(Operation): + def precommit_event(self): + execute = self.session.unsafe_execute + for rql in self.rqls: + execute(*rql) diff -r 15d541321a8c -r 74c1597f8a82 server/hookhelper.py --- a/server/hookhelper.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/hookhelper.py Wed Jan 20 10:13:45 2010 +0100 @@ -7,87 +7,23 @@ """ __docformat__ = "restructuredtext en" -from cubicweb import RepositoryError -from cubicweb.server.pool import SingleLastOperation - -def entity_oldnewvalue(entity, attr): - """returns the couple (old attr value, new attr value) +from logilab.common.deprecation import deprecated, class_moved - NOTE: will only work in a before_update_entity hook - """ - # get new value and remove from local dict to force a db query to - # fetch old value - newvalue = entity.pop(attr, None) - oldvalue = getattr(entity, attr) - if newvalue is not None: - entity[attr] = newvalue - return oldvalue, newvalue +from cubicweb import RepositoryError +from cubicweb.server import hook -def rproperty(session, rtype, eidfrom, eidto, rprop): - rschema = session.repo.schema[rtype] - subjtype = session.describe(eidfrom)[0] - objtype = session.describe(eidto)[0] - return rschema.rproperty(subjtype, objtype, rprop) - -def check_internal_entity(session, eid, internal_names): - """check that the entity's name is not in the internal_names list. 
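RQLPrecommitOperation, defined just above, is the simplest way to queue raw RQL for precommit time; a usage sketch follows. The relation name is made up, and each item of rqls is unpacked as the arguments to unsafe_execute.

from cubicweb.server.hook import RQLPrecommitOperation

def schedule_cleanup(session, eid):
    RQLPrecommitOperation(session, rqls=[
        ('DELETE X stale_link Y WHERE X eid %(x)s', {'x': eid}, 'x'),
        ])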
- raise a RepositoryError if so, else return the entity's name - """ - name = session.entity_from_eid(eid).name - if name in internal_names: - raise RepositoryError('%s entity can\'t be deleted' % name) - return name - -def get_user_sessions(repo, ueid): - for session in repo._sessions.values(): - if ueid == session.user.eid: - yield session - - -# mail related ################################################################ +@deprecated('[3.6] entity_oldnewvalue should be imported from cw.server.hook') +def entity_oldnewvalue(entity, attr): + """return the "name" attribute of the entity with the given eid""" + return hook.entity_oldnewvalue(entity, attr) -class SendMailOp(SingleLastOperation): - def __init__(self, session, msg=None, recipients=None, **kwargs): - # may not specify msg yet, as - # `cubicweb.sobjects.supervision.SupervisionMailOp` - if msg is not None: - assert recipients - self.to_send = [(msg, recipients)] - else: - assert recipients is None - self.to_send = [] - super(SendMailOp, self).__init__(session, **kwargs) - - def register(self, session): - previous = super(SendMailOp, self).register(session) - if previous: - self.to_send = previous.to_send + self.to_send - - def commit_event(self): - self.repo.threaded_task(self.sendmails) - - def sendmails(self): - self.config.sendmails(self.to_send) +@deprecated('[3.6] entity_name is deprecated, use entity.name') +def entity_name(session, eid): + """return the "name" attribute of the entity with the given eid""" + return session.entity_from_eid(eid).name - -# state related ############################################################### +@deprecated('[3.6] rproperty is deprecated, use session.schema_rproperty') +def rproperty(session, rtype, eidfrom, eidto, rprop): + return session.rproperty(rtype, eidfrom, eidto, rprop) -def previous_state(session, eid): - """return the state of the entity with the given eid, - usually since it's changing in the current transaction. Due to internal - relation hooks, the relation may has been deleted at this point, so - we have handle that - """ - # don't check eid in session.transaction_data.get('neweids', ()), we don't - # want to miss previous state of entity whose state change in the same - # transaction as it's being created - pending = session.transaction_data.get('pendingrelations', ()) - for eidfrom, rtype, eidto in reversed(pending): - if rtype == 'in_state' and eidfrom == eid: - rset = session.execute('Any S,N WHERE S eid %(x)s, S name N', - {'x': eidto}, 'x') - return rset.get_entity(0, 0) - rset = session.execute('Any S,N WHERE X eid %(x)s, X in_state S, S name N', - {'x': eid}, 'x') - if rset: - return rset.get_entity(0, 0) +SendMailOp = class_moved(hook.SendMailOp) diff -r 15d541321a8c -r 74c1597f8a82 server/hooks.py --- a/server/hooks.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,841 +0,0 @@ -"""Core hooks: check schema validity, unsure we are not deleting necessary -entities... - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
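Since hookhelper now only hosts deprecation shims, new code is expected to import these helpers from cubicweb.server.hook directly. A sketch of a before_update hook using entity_oldnewvalue; the 'Ticket' type and 'priority' attribute are invented.

from cubicweb.server import hook
from cubicweb.selectors import implements

class LogPriorityChange(hook.Hook):
    __regid__ = 'myapp.log_priority_change'
    __select__ = hook.Hook.__select__ & implements('Ticket')
    events = ('before_update_entity',)

    def __call__(self):
        old, new = hook.entity_oldnewvalue(self.entity, 'priority')
        if old != new:
            self.info('priority of #%s changed: %s -> %s',
                      self.entity.eid, old, new)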
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -from datetime import datetime - -from cubicweb import UnknownProperty, ValidationError, BadConnectionId -from cubicweb.schema import RQLConstraint, RQLUniqueConstraint -from cubicweb.server.pool import Operation, LateOperation, PreCommitOperation -from cubicweb.server.hookhelper import (check_internal_entity, - get_user_sessions, rproperty) -from cubicweb.server.repository import FTIndexEntityOp - -# special relations that don't have to be checked for integrity, usually -# because they are handled internally by hooks (so we trust ourselves) -DONT_CHECK_RTYPES_ON_ADD = set(('owned_by', 'created_by', - 'is', 'is_instance_of', - 'wf_info_for', 'from_state', 'to_state')) -DONT_CHECK_RTYPES_ON_DEL = set(('is', 'is_instance_of', - 'wf_info_for', 'from_state', 'to_state')) - - -def relation_deleted(session, eidfrom, rtype, eidto): - session.transaction_data.setdefault('pendingrelations', []).append( - (eidfrom, rtype, eidto)) - -def eschema_type_eid(session, etype): - """get eid of the CWEType entity for the given yams type""" - eschema = session.repo.schema.eschema(etype) - # eschema.eid is None if schema has been readen from the filesystem, not - # from the database (eg during tests) - if eschema.eid is None: - eschema.eid = session.unsafe_execute( - 'Any X WHERE X is CWEType, X name %(name)s', - {'name': str(etype)})[0][0] - return eschema.eid - - -# base meta-data handling ###################################################### - -def setctime_before_add_entity(session, entity): - """before create a new entity -> set creation and modification date - - this is a conveniency hook, you shouldn't have to disable it - """ - timestamp = datetime.now() - entity.setdefault('creation_date', timestamp) - entity.setdefault('modification_date', timestamp) - if not session.get_shared_data('do-not-insert-cwuri'): - entity.setdefault('cwuri', u'%seid/%s' % (session.base_url(), entity.eid)) - - -def setmtime_before_update_entity(session, entity): - """update an entity -> set modification date""" - entity.setdefault('modification_date', datetime.now()) - - -class SetCreatorOp(PreCommitOperation): - - def precommit_event(self): - session = self.session - if self.entity.eid in session.transaction_data.get('pendingeids', ()): - # entity have been created and deleted in the same transaction - return - if not self.entity.created_by: - session.add_relation(self.entity.eid, 'created_by', session.user.eid) - - -def setowner_after_add_entity(session, entity): - """create a new entity -> set owner and creator metadata""" - asession = session.actual_session() - if not asession.is_internal_session: - session.add_relation(entity.eid, 'owned_by', asession.user.eid) - SetCreatorOp(asession, entity=entity) - - -def setis_after_add_entity(session, entity): - """create a new entity -> set is relation""" - if hasattr(entity, '_cw_recreating'): - return - try: - #session.add_relation(entity.eid, 'is', - # eschema_type_eid(session, entity.id)) - session.system_sql('INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)' - % (entity.eid, eschema_type_eid(session, entity.id))) - except IndexError: - # during schema serialization, skip - return - for etype in entity.e_schema.ancestors() + [entity.e_schema]: - #session.add_relation(entity.eid, 'is_instance_of', - # eschema_type_eid(session, etype)) - session.system_sql('INSERT INTO 
is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)' - % (entity.eid, eschema_type_eid(session, etype))) - - -def setowner_after_add_user(session, entity): - """when a user has been created, add owned_by relation on itself""" - session.add_relation(entity.eid, 'owned_by', entity.eid) - - -def fti_update_after_add_relation(session, eidfrom, rtype, eidto): - """sync fulltext index when relevant relation is added. Reindexing the - contained entity is enough since it will implicitly reindex the container - entity. - """ - ftcontainer = session.repo.schema.rschema(rtype).fulltext_container - if ftcontainer == 'subject': - FTIndexEntityOp(session, entity=session.entity_from_eid(eidto)) - elif ftcontainer == 'object': - FTIndexEntityOp(session, entity=session.entity_from_eid(eidfrom)) - - -def fti_update_after_delete_relation(session, eidfrom, rtype, eidto): - """sync fulltext index when relevant relation is deleted. Reindexing both - entities is necessary. - """ - if session.repo.schema.rschema(rtype).fulltext_container: - FTIndexEntityOp(session, entity=session.entity_from_eid(eidto)) - FTIndexEntityOp(session, entity=session.entity_from_eid(eidfrom)) - - -class SyncOwnersOp(PreCommitOperation): - - def precommit_event(self): - self.session.unsafe_execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,' - 'NOT EXISTS(X owned_by U, X eid %(x)s)', - {'c': self.compositeeid, 'x': self.composedeid}, - ('c', 'x')) - - -def sync_owner_after_add_composite_relation(session, eidfrom, rtype, eidto): - """when adding composite relation, the composed should have the same owners - has the composite - """ - if rtype == 'wf_info_for': - # skip this special composite relation # XXX (syt) why? - return - composite = rproperty(session, rtype, eidfrom, eidto, 'composite') - if composite == 'subject': - SyncOwnersOp(session, compositeeid=eidfrom, composedeid=eidto) - elif composite == 'object': - SyncOwnersOp(session, compositeeid=eidto, composedeid=eidfrom) - - -def _register_metadata_hooks(hm): - """register meta-data related hooks on the hooks manager""" - hm.register_hook(setctime_before_add_entity, 'before_add_entity', '') - hm.register_hook(setmtime_before_update_entity, 'before_update_entity', '') - hm.register_hook(setowner_after_add_entity, 'after_add_entity', '') - hm.register_hook(sync_owner_after_add_composite_relation, 'after_add_relation', '') - hm.register_hook(fti_update_after_add_relation, 'after_add_relation', '') - hm.register_hook(fti_update_after_delete_relation, 'after_delete_relation', '') - if 'is' in hm.schema: - hm.register_hook(setis_after_add_entity, 'after_add_entity', '') - if 'CWUser' in hm.schema: - hm.register_hook(setowner_after_add_user, 'after_add_entity', 'CWUser') - - -# core hooks ################################################################## - -class DelayedDeleteOp(PreCommitOperation): - """delete the object of composite relation except if the relation - has actually been redirected to another composite - """ - - def precommit_event(self): - session = self.session - # don't do anything if the entity is being created or deleted - if not (self.eid in session.transaction_data.get('pendingeids', ()) or - self.eid in session.transaction_data.get('neweids', ())): - etype = session.describe(self.eid)[0] - if self.role == 'subject': - rql = 'DELETE %s X WHERE X eid %%(x)s, NOT X %s Y' - else: # self.role == 'object': - rql = 'DELETE %s X WHERE X eid %%(x)s, NOT Y %s X' - session.unsafe_execute(rql % (etype, self.rtype), {'x': self.eid}, 'x') - - -def 
handle_composite_before_del_relation(session, eidfrom, rtype, eidto): - """delete the object of composite relation""" - # if the relation is being delete, don't delete composite's components - # automatically - pendingrdefs = session.transaction_data.get('pendingrdefs', ()) - if (session.describe(eidfrom)[0], rtype, session.describe(eidto)[0]) in pendingrdefs: - return - composite = rproperty(session, rtype, eidfrom, eidto, 'composite') - if composite == 'subject': - DelayedDeleteOp(session, eid=eidto, rtype=rtype, role='object') - elif composite == 'object': - DelayedDeleteOp(session, eid=eidfrom, rtype=rtype, role='subject') - - -def before_del_group(session, eid): - """check that we don't remove the owners group""" - check_internal_entity(session, eid, ('owners',)) - - -# schema validation hooks ##################################################### - -class CheckConstraintsOperation(LateOperation): - """check a new relation satisfy its constraints - """ - def precommit_event(self): - eidfrom, rtype, eidto = self.rdef - # first check related entities have not been deleted in the same - # transaction - pending = self.session.transaction_data.get('pendingeids', ()) - if eidfrom in pending: - return - if eidto in pending: - return - for constraint in self.constraints: - try: - constraint.repo_check(self.session, eidfrom, rtype, eidto) - except NotImplementedError: - self.critical('can\'t check constraint %s, not supported', - constraint) - - def commit_event(self): - pass - - -def cstrcheck_after_add_relation(session, eidfrom, rtype, eidto): - """check the relation satisfy its constraints - - this is delayed to a precommit time operation since other relation which - will make constraint satisfied may be added later. - """ - if session.is_super_session: - return - constraints = rproperty(session, rtype, eidfrom, eidto, 'constraints') - if constraints: - # XXX get only RQL[Unique]Constraints? 
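The deferral mentioned just above is the key point: a constraint that fails at the moment a relation is added may become satisfied by other relations added later in the same transaction, so the check is only meaningful at pre-commit time. Below is a rough standalone sketch of that idea, using invented names rather than CubicWeb's Operation API, with a simple "at least one relation of this type" rule standing in for the real RQL constraints.

class ConstraintError(Exception):
    pass

class MiniTransaction(object):
    def __init__(self):
        self.relations = set()       # (subject, rtype, object) triples
        self.deferred_checks = []    # run just before commit

    def create_entity(self, eid, required_rtype):
        # don't check "at least one <required_rtype>" now: the relation may
        # legitimately be added later in the same transaction
        self.deferred_checks.append((eid, required_rtype))

    def add_relation(self, subj, rtype, obj):
        self.relations.add((subj, rtype, obj))

    def commit(self):
        for eid, rtype in self.deferred_checks:
            if not any(s == eid and r == rtype for s, r, _ in self.relations):
                raise ConstraintError('%s needs at least one %s' % (eid, rtype))
        return True

tx = MiniTransaction()
tx.create_entity('ticket1', 'concerns')             # check deferred, not failed
tx.add_relation('ticket1', 'concerns', 'project1')  # satisfies it before commit
assert tx.commit()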
- CheckConstraintsOperation(session, constraints=constraints, - rdef=(eidfrom, rtype, eidto)) - -def uniquecstrcheck_before_modification(session, entity): - if session.is_super_session: - return - eschema = entity.e_schema - for attr in entity.edited_attributes: - val = entity[attr] - if val is None: - continue - if eschema.subjrels[attr].final and \ - eschema.has_unique_values(attr): - rql = '%s X WHERE X %s %%(val)s' % (entity.e_schema, attr) - rset = session.unsafe_execute(rql, {'val': val}) - if rset and rset[0][0] != entity.eid: - msg = session._('the value "%s" is already used, use another one') - raise ValidationError(entity.eid, {attr: msg % val}) - - -def cstrcheck_after_update_attributes(session, entity): - if session.is_super_session: - return - eschema = entity.e_schema - for attr in entity.edited_attributes: - if eschema.subjrels[attr].final: - constraints = [c for c in entity.e_schema.constraints(attr) - if isinstance(c, (RQLConstraint, RQLUniqueConstraint))] - if constraints: - CheckConstraintsOperation(session, rdef=(entity.eid, attr, None), - constraints=constraints) - - -class CheckRequiredRelationOperation(LateOperation): - """checking relation cardinality has to be done after commit in - case the relation is being replaced - """ - eid, rtype = None, None - - def precommit_event(self): - # recheck pending eids - if self.eid in self.session.transaction_data.get('pendingeids', ()): - return - if self.rtype in self.session.transaction_data.get('pendingrtypes', ()): - return - if self.session.unsafe_execute(*self._rql()).rowcount < 1: - etype = self.session.describe(self.eid)[0] - _ = self.session._ - msg = _('at least one relation %(rtype)s is required on %(etype)s (%(eid)s)') - msg %= {'rtype': _(self.rtype), 'etype': _(etype), 'eid': self.eid} - raise ValidationError(self.eid, {self.rtype: msg}) - - def commit_event(self): - pass - - def _rql(self): - raise NotImplementedError() - - -class CheckSRelationOp(CheckRequiredRelationOperation): - """check required subject relation""" - def _rql(self): - return 'Any O WHERE S eid %%(x)s, S %s O' % self.rtype, {'x': self.eid}, 'x' - - -class CheckORelationOp(CheckRequiredRelationOperation): - """check required object relation""" - def _rql(self): - return 'Any S WHERE O eid %%(x)s, S %s O' % self.rtype, {'x': self.eid}, 'x' - - -def checkrel_if_necessary(session, opcls, rtype, eid): - """check an equivalent operation has not already been added""" - for op in session.pending_operations: - if isinstance(op, opcls) and op.rtype == rtype and op.eid == eid: - break - else: - opcls(session, rtype=rtype, eid=eid) - - -def cardinalitycheck_after_add_entity(session, entity): - """check cardinalities are satisfied""" - if session.is_super_session: - return - eid = entity.eid - for rschema, targetschemas, x in entity.e_schema.relation_definitions(): - # skip automatically handled relations - if rschema.type in DONT_CHECK_RTYPES_ON_ADD: - continue - if x == 'subject': - subjtype = entity.e_schema - objtype = targetschemas[0].type - cardindex = 0 - opcls = CheckSRelationOp - else: - subjtype = targetschemas[0].type - objtype = entity.e_schema - cardindex = 1 - opcls = CheckORelationOp - card = rschema.rproperty(subjtype, objtype, 'cardinality') - if card[cardindex] in '1+': - checkrel_if_necessary(session, opcls, rschema.type, eid) - -def cardinalitycheck_before_del_relation(session, eidfrom, rtype, eidto): - """check cardinalities are satisfied""" - if session.is_super_session: - return - if rtype in DONT_CHECK_RTYPES_ON_DEL: - return - card 
= rproperty(session, rtype, eidfrom, eidto, 'cardinality') - pendingrdefs = session.transaction_data.get('pendingrdefs', ()) - if (session.describe(eidfrom)[0], rtype, session.describe(eidto)[0]) in pendingrdefs: - return - pendingeids = session.transaction_data.get('pendingeids', ()) - if card[0] in '1+' and not eidfrom in pendingeids: - checkrel_if_necessary(session, CheckSRelationOp, rtype, eidfrom) - if card[1] in '1+' and not eidto in pendingeids: - checkrel_if_necessary(session, CheckORelationOp, rtype, eidto) - - -def _register_core_hooks(hm): - hm.register_hook(handle_composite_before_del_relation, 'before_delete_relation', '') - hm.register_hook(before_del_group, 'before_delete_entity', 'CWGroup') - - #hm.register_hook(cstrcheck_before_update_entity, 'before_update_entity', '') - hm.register_hook(cardinalitycheck_after_add_entity, 'after_add_entity', '') - hm.register_hook(cardinalitycheck_before_del_relation, 'before_delete_relation', '') - hm.register_hook(cstrcheck_after_add_relation, 'after_add_relation', '') - hm.register_hook(uniquecstrcheck_before_modification, 'before_add_entity', '') - hm.register_hook(uniquecstrcheck_before_modification, 'before_update_entity', '') - hm.register_hook(cstrcheck_after_update_attributes, 'after_add_entity', '') - hm.register_hook(cstrcheck_after_update_attributes, 'after_update_entity', '') - -# user/groups synchronisation ################################################# - -class GroupOperation(Operation): - """base class for group operation""" - geid = None - def __init__(self, session, *args, **kwargs): - """override to get the group name before actual groups manipulation: - - we may temporarily loose right access during a commit event, so - no query should be emitted while comitting - """ - rql = 'Any N WHERE G eid %(x)s, G name N' - result = session.execute(rql, {'x': kwargs['geid']}, 'x', build_descr=False) - Operation.__init__(self, session, *args, **kwargs) - self.group = result[0][0] - - -class DeleteGroupOp(GroupOperation): - """synchronize user when a in_group relation has been deleted""" - def commit_event(self): - """the observed connections pool has been commited""" - groups = self.cnxuser.groups - try: - groups.remove(self.group) - except KeyError: - self.error('user %s not in group %s', self.cnxuser, self.group) - return - - -def after_del_in_group(session, fromeid, rtype, toeid): - """modify user permission, need to update users""" - for session_ in get_user_sessions(session.repo, fromeid): - DeleteGroupOp(session, cnxuser=session_.user, geid=toeid) - - -class AddGroupOp(GroupOperation): - """synchronize user when a in_group relation has been added""" - def commit_event(self): - """the observed connections pool has been commited""" - groups = self.cnxuser.groups - if self.group in groups: - self.warning('user %s already in group %s', self.cnxuser, - self.group) - return - groups.add(self.group) - - -def after_add_in_group(session, fromeid, rtype, toeid): - """modify user permission, need to update users""" - for session_ in get_user_sessions(session.repo, fromeid): - AddGroupOp(session, cnxuser=session_.user, geid=toeid) - - -class DelUserOp(Operation): - """synchronize user when a in_group relation has been added""" - def __init__(self, session, cnxid): - self.cnxid = cnxid - Operation.__init__(self, session) - - def commit_event(self): - """the observed connections pool has been commited""" - try: - self.repo.close(self.cnxid) - except BadConnectionId: - pass # already closed - - -def after_del_user(session, eid): - 
"""modify user permission, need to update users""" - for session_ in get_user_sessions(session.repo, eid): - DelUserOp(session, session_.id) - - -def _register_usergroup_hooks(hm): - """register user/group related hooks on the hooks manager""" - hm.register_hook(after_del_user, 'after_delete_entity', 'CWUser') - hm.register_hook(after_add_in_group, 'after_add_relation', 'in_group') - hm.register_hook(after_del_in_group, 'after_delete_relation', 'in_group') - - -# workflow handling ########################################################### - -from cubicweb.entities.wfobjs import WorkflowTransition, WorkflowException - -def _change_state(session, x, oldstate, newstate): - nocheck = session.transaction_data.setdefault('skip-security', set()) - nocheck.add((x, 'in_state', oldstate)) - nocheck.add((x, 'in_state', newstate)) - # delete previous state first in case we're using a super session - fromsource = session.describe(x)[1] - # don't try to remove previous state if in_state isn't stored in the system - # source - if fromsource == 'system' or \ - not session.repo.sources_by_uri[fromsource].support_relation('in_state'): - session.delete_relation(x, 'in_state', oldstate) - session.add_relation(x, 'in_state', newstate) - - -class FireAutotransitionOp(PreCommitOperation): - """try to fire auto transition after state changes""" - - def precommit_event(self): - session = self.session - entity = self.entity - autotrs = list(entity.possible_transitions('auto')) - if autotrs: - assert len(autotrs) == 1 - entity.fire_transition(autotrs[0]) - - -def before_add_trinfo(session, entity): - """check the transition is allowed, add missing information. Expect that: - * wf_info_for inlined relation is set - * by_transition or to_state (managers only) inlined relation is set - """ - # first retreive entity to which the state change apply - try: - foreid = entity['wf_info_for'] - except KeyError: - msg = session._('mandatory relation') - raise ValidationError(entity.eid, {'wf_info_for': msg}) - forentity = session.entity_from_eid(foreid) - # then check it has a workflow set, unless we're in the process of changing - # entity's workflow - if session.transaction_data.get((forentity.eid, 'customwf')): - wfeid = session.transaction_data[(forentity.eid, 'customwf')] - wf = session.entity_from_eid(wfeid) - else: - wf = forentity.current_workflow - if wf is None: - msg = session._('related entity has no workflow set') - raise ValidationError(entity.eid, {None: msg}) - # then check it has a state set - fromstate = forentity.current_state - if fromstate is None: - msg = session._('related entity has no state') - raise ValidationError(entity.eid, {None: msg}) - # True if we are coming back from subworkflow - swtr = session.transaction_data.pop((forentity.eid, 'subwfentrytr'), None) - cowpowers = session.is_super_session or 'managers' in session.user.groups - # no investigate the requested state change... 
- try: - treid = entity['by_transition'] - except KeyError: - # no transition set, check user is a manager and destination state is - # specified (and valid) - if not cowpowers: - msg = session._('mandatory relation') - raise ValidationError(entity.eid, {'by_transition': msg}) - deststateeid = entity.get('to_state') - if not deststateeid: - msg = session._('mandatory relation') - raise ValidationError(entity.eid, {'by_transition': msg}) - deststate = wf.state_by_eid(deststateeid) - if deststate is None: - msg = entity.req._("state doesn't belong to entity's current workflow") - raise ValidationError(entity.eid, {'to_state': msg}) - else: - # check transition is valid and allowed, unless we're coming back from - # subworkflow - tr = session.entity_from_eid(treid) - if swtr is None: - if tr is None: - msg = session._("transition doesn't belong to entity's workflow") - raise ValidationError(entity.eid, {'by_transition': msg}) - if not tr.has_input_state(fromstate): - _ = session._ - msg = _("transition %(tr)s isn't allowed from %(st)s") % {'tr': _(tr.name), - 'st': _(fromstate.name), - } - raise ValidationError(entity.eid, {'by_transition': msg}) - if not tr.may_be_fired(foreid): - msg = session._("transition may not be fired") - raise ValidationError(entity.eid, {'by_transition': msg}) - if entity.get('to_state'): - deststateeid = entity['to_state'] - if not cowpowers and deststateeid != tr.destination().eid: - msg = session._("transition isn't allowed") - raise ValidationError(entity.eid, {'by_transition': msg}) - if swtr is None: - deststate = session.entity_from_eid(deststateeid) - if not cowpowers and deststate is None: - msg = entity.req._("state doesn't belong to entity's workflow") - raise ValidationError(entity.eid, {'to_state': msg}) - else: - deststateeid = tr.destination().eid - # everything is ok, add missing information on the trinfo entity - entity['from_state'] = fromstate.eid - entity['to_state'] = deststateeid - nocheck = session.transaction_data.setdefault('skip-security', set()) - nocheck.add((entity.eid, 'from_state', fromstate.eid)) - nocheck.add((entity.eid, 'to_state', deststateeid)) - FireAutotransitionOp(session, entity=forentity) - - -def after_add_trinfo(session, entity): - """change related entity state""" - _change_state(session, entity['wf_info_for'], - entity['from_state'], entity['to_state']) - forentity = session.entity_from_eid(entity['wf_info_for']) - assert forentity.current_state.eid == entity['to_state'], ( - forentity.eid, forentity.current_state.name) - if forentity.main_workflow.eid != forentity.current_workflow.eid: - SubWorkflowExitOp(session, forentity=forentity, trinfo=entity) - -class SubWorkflowExitOp(PreCommitOperation): - def precommit_event(self): - session = self.session - forentity = self.forentity - trinfo = self.trinfo - # we're in a subworkflow, check if we've reached an exit point - wftr = forentity.subworkflow_input_transition() - if wftr is None: - # inconsistency detected - msg = session._("state doesn't belong to entity's current workflow") - raise ValidationError(self.trinfo.eid, {'to_state': msg}) - tostate = wftr.get_exit_point(forentity, trinfo['to_state']) - if tostate is not None: - # reached an exit point - msg = session._('exiting from subworkflow %s') - msg %= session._(forentity.current_workflow.name) - session.transaction_data[(forentity.eid, 'subwfentrytr')] = True - # XXX iirk - req = forentity.req - forentity.req = session.super_session - try: - trinfo = forentity.change_state(tostate, msg, u'text/plain', - tr=wftr) - 
finally: - forentity.req = req - - -class SetInitialStateOp(PreCommitOperation): - """make initial state be a default state""" - - def precommit_event(self): - session = self.session - entity = self.entity - # if there is an initial state and the entity's state is not set, - # use the initial state as a default state - pendingeids = session.transaction_data.get('pendingeids', ()) - if not entity.eid in pendingeids and not entity.in_state and \ - entity.main_workflow: - state = entity.main_workflow.initial - if state: - # use super session to by-pass security checks - session.super_session.add_relation(entity.eid, 'in_state', - state.eid) - - -def set_initial_state_after_add(session, entity): - SetInitialStateOp(session, entity=entity) - - -def before_add_in_state(session, eidfrom, rtype, eidto): - """check state apply, in case of direct in_state change using unsafe_execute - """ - nocheck = session.transaction_data.setdefault('skip-security', set()) - if (eidfrom, 'in_state', eidto) in nocheck: - # state changed through TrInfo insertion, so we already know it's ok - return - entity = session.entity_from_eid(eidfrom) - mainwf = entity.main_workflow - if mainwf is None: - msg = session._('entity has no workflow set') - raise ValidationError(entity.eid, {None: msg}) - for wf in mainwf.iter_workflows(): - if wf.state_by_eid(eidto): - break - else: - msg = session._("state doesn't belong to entity's workflow. You may " - "want to set a custom workflow for this entity first.") - raise ValidationError(eidfrom, {'in_state': msg}) - if entity.current_workflow and wf.eid != entity.current_workflow.eid: - msg = session._("state doesn't belong to entity's current workflow") - raise ValidationError(eidfrom, {'in_state': msg}) - - -class CheckTrExitPoint(PreCommitOperation): - - def precommit_event(self): - tr = self.session.entity_from_eid(self.treid) - outputs = set() - for ep in tr.subworkflow_exit: - if ep.subwf_state.eid in outputs: - msg = self.session._("can't have multiple exits on the same state") - raise ValidationError(self.treid, {'subworkflow_exit': msg}) - outputs.add(ep.subwf_state.eid) - - -def after_add_subworkflow_exit(session, eidfrom, rtype, eidto): - CheckTrExitPoint(session, treid=eidfrom) - - -class WorkflowChangedOp(PreCommitOperation): - """fix entity current state when changing its workflow""" - - def precommit_event(self): - # notice that enforcement that new workflow apply to the entity's type is - # done by schema rule, no need to check it here - session = self.session - pendingeids = session.transaction_data.get('pendingeids', ()) - if self.eid in pendingeids: - return - entity = session.entity_from_eid(self.eid) - # check custom workflow has not been rechanged to another one in the same - # transaction - mainwf = entity.main_workflow - if mainwf.eid == self.wfeid: - deststate = mainwf.initial - if not deststate: - msg = session._('workflow has no initial state') - raise ValidationError(entity.eid, {'custom_workflow': msg}) - if mainwf.state_by_eid(entity.current_state.eid): - # nothing to do - return - # if there are no history, simply go to new workflow's initial state - if not entity.workflow_history: - if entity.current_state.eid != deststate.eid: - _change_state(session, entity.eid, - entity.current_state.eid, deststate.eid) - return - msg = session._('workflow changed to "%s"') - msg %= session._(mainwf.name) - session.transaction_data[(entity.eid, 'customwf')] = self.wfeid - entity.change_state(deststate, msg, u'text/plain') - - -def set_custom_workflow(session, 
eidfrom, rtype, eidto): - WorkflowChangedOp(session, eid=eidfrom, wfeid=eidto) - - -def del_custom_workflow(session, eidfrom, rtype, eidto): - entity = session.entity_from_eid(eidfrom) - typewf = entity.cwetype_workflow() - if typewf is not None: - WorkflowChangedOp(session, eid=eidfrom, wfeid=typewf.eid) - - -def after_del_workflow(session, eid): - # workflow cleanup - session.execute('DELETE State X WHERE NOT X state_of Y') - session.execute('DELETE Transition X WHERE NOT X transition_of Y') - - -def _register_wf_hooks(hm): - """register workflow related hooks on the hooks manager""" - if 'in_state' in hm.schema: - hm.register_hook(before_add_trinfo, 'before_add_entity', 'TrInfo') - hm.register_hook(after_add_trinfo, 'after_add_entity', 'TrInfo') - #hm.register_hook(relation_deleted, 'before_delete_relation', 'in_state') - for eschema in hm.schema.entities(): - if 'in_state' in eschema.subject_relations(): - hm.register_hook(set_initial_state_after_add, 'after_add_entity', - str(eschema)) - hm.register_hook(set_custom_workflow, 'after_add_relation', 'custom_workflow') - hm.register_hook(del_custom_workflow, 'after_delete_relation', 'custom_workflow') - hm.register_hook(after_del_workflow, 'after_delete_entity', 'Workflow') - hm.register_hook(before_add_in_state, 'before_add_relation', 'in_state') - hm.register_hook(after_add_subworkflow_exit, 'after_add_relation', 'subworkflow_exit') - - -# CWProperty hooks ############################################################# - - -class DelCWPropertyOp(Operation): - """a user's custom properties has been deleted""" - - def commit_event(self): - """the observed connections pool has been commited""" - try: - del self.epropdict[self.key] - except KeyError: - self.error('%s has no associated value', self.key) - - -class ChangeCWPropertyOp(Operation): - """a user's custom properties has been added/changed""" - - def commit_event(self): - """the observed connections pool has been commited""" - self.epropdict[self.key] = self.value - - -class AddCWPropertyOp(Operation): - """a user's custom properties has been added/changed""" - - def commit_event(self): - """the observed connections pool has been commited""" - eprop = self.eprop - if not eprop.for_user: - self.repo.vreg.eprop_values[eprop.pkey] = eprop.value - # if for_user is set, update is handled by a ChangeCWPropertyOp operation - - -def after_add_eproperty(session, entity): - key, value = entity.pkey, entity.value - try: - value = session.vreg.typed_value(key, value) - except UnknownProperty: - raise ValidationError(entity.eid, {'pkey': session._('unknown property key')}) - except ValueError, ex: - raise ValidationError(entity.eid, {'value': session._(str(ex))}) - if not session.user.matching_groups('managers'): - session.add_relation(entity.eid, 'for_user', session.user.eid) - else: - AddCWPropertyOp(session, eprop=entity) - - -def after_update_eproperty(session, entity): - if not ('pkey' in entity.edited_attributes or - 'value' in entity.edited_attributes): - return - key, value = entity.pkey, entity.value - try: - value = session.vreg.typed_value(key, value) - except UnknownProperty: - return - except ValueError, ex: - raise ValidationError(entity.eid, {'value': session._(str(ex))}) - if entity.for_user: - for session_ in get_user_sessions(session.repo, entity.for_user[0].eid): - ChangeCWPropertyOp(session, epropdict=session_.user.properties, - key=key, value=value) - else: - # site wide properties - ChangeCWPropertyOp(session, epropdict=session.vreg.eprop_values, - key=key, value=value) - - 
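All of the CWProperty hooks above share one pattern: the in-memory property caches are never modified directly, only from an operation's commit_event, so the caches stay consistent if the transaction is rolled back. A minimal standalone model of that pattern, with invented class names rather than CubicWeb's Operation API:

class ChangePropertyOp(object):
    def __init__(self, cache, key, value):
        self.cache, self.key, self.value = cache, key, value

    def commit_event(self):
        # only touch the shared cache once the transaction really commits
        self.cache[self.key] = self.value

class TransactionStub(object):
    def __init__(self):
        self.operations = []

    def commit(self):
        for op in self.operations:
            op.commit_event()
        self.operations = []

    def rollback(self):
        self.operations = []   # pending cache updates are simply dropped

site_properties = {'ui.language': 'en'}
tx = TransactionStub()
tx.operations.append(ChangePropertyOp(site_properties, 'ui.language', 'fr'))
tx.rollback()
assert site_properties['ui.language'] == 'en'    # cache untouched
tx.operations.append(ChangePropertyOp(site_properties, 'ui.language', 'fr'))
tx.commit()
assert site_properties['ui.language'] == 'fr'    # applied at commit time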
-def before_del_eproperty(session, eid): - for eidfrom, rtype, eidto in session.transaction_data.get('pendingrelations', ()): - if rtype == 'for_user' and eidfrom == eid: - # if for_user was set, delete has already been handled - break - else: - key = session.execute('Any K WHERE P eid %(x)s, P pkey K', - {'x': eid}, 'x')[0][0] - DelCWPropertyOp(session, epropdict=session.vreg.eprop_values, key=key) - - -def after_add_for_user(session, fromeid, rtype, toeid): - if not session.describe(fromeid)[0] == 'CWProperty': - return - key, value = session.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V', - {'x': fromeid}, 'x')[0] - if session.vreg.property_info(key)['sitewide']: - raise ValidationError(fromeid, - {'for_user': session._("site-wide property can't be set for user")}) - for session_ in get_user_sessions(session.repo, toeid): - ChangeCWPropertyOp(session, epropdict=session_.user.properties, - key=key, value=value) - - -def before_del_for_user(session, fromeid, rtype, toeid): - key = session.execute('Any K WHERE P eid %(x)s, P pkey K', - {'x': fromeid}, 'x')[0][0] - relation_deleted(session, fromeid, rtype, toeid) - for session_ in get_user_sessions(session.repo, toeid): - DelCWPropertyOp(session, epropdict=session_.user.properties, key=key) - - -def _register_eproperty_hooks(hm): - """register workflow related hooks on the hooks manager""" - hm.register_hook(after_add_eproperty, 'after_add_entity', 'CWProperty') - hm.register_hook(after_update_eproperty, 'after_update_entity', 'CWProperty') - hm.register_hook(before_del_eproperty, 'before_delete_entity', 'CWProperty') - hm.register_hook(after_add_for_user, 'after_add_relation', 'for_user') - hm.register_hook(before_del_for_user, 'before_delete_relation', 'for_user') diff -r 15d541321a8c -r 74c1597f8a82 server/hooksmanager.py --- a/server/hooksmanager.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/hooksmanager.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,347 +1,10 @@ -"""Hooks management - -Hooks are called before / after any individual update of entities / relations -in the repository. - -Here is the prototype of the different hooks: - -* filtered on the entity's type: - - before_add_entity (session, entity) - after_add_entity (session, entity) - before_update_entity (session, entity) - after_update_entity (session, entity) - before_delete_entity (session, eid) - after_delete_entity (session, eid) - -* filtered on the relation's type: - - before_add_relation (session, fromeid, rtype, toeid) - after_add_relation (session, fromeid, rtype, toeid) - before_delete_relation (session, fromeid, rtype, toeid) - after_delete_relation (session, fromeid, rtype, toeid) - - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -ENTITIES_HOOKS = ('before_add_entity', 'after_add_entity', - 'before_update_entity', 'after_update_entity', - 'before_delete_entity', 'after_delete_entity') -RELATIONS_HOOKS = ('before_add_relation', 'after_add_relation' , - 'before_delete_relation','after_delete_relation') -SYSTEM_HOOKS = ('server_backup', 'server_restore', - 'server_startup', 'server_shutdown', - 'session_open', 'session_close') - -ALL_HOOKS = frozenset(ENTITIES_HOOKS + RELATIONS_HOOKS + SYSTEM_HOOKS) - -class HooksManager(object): - """handle hooks registration and calls - """ - verification_hooks_activated = True - - def __init__(self, schema): - self.set_schema(schema) - - def set_schema(self, schema): - self._hooks = {} - self.schema = schema - self._init_hooks(schema) - - def register_hooks(self, hooks): - """register a dictionary of hooks : - - {'event': {'entity or relation type': [callbacks list]}} - """ - for event, subevents in hooks.items(): - for subevent, callbacks in subevents.items(): - for callback in callbacks: - self.register_hook(callback, event, subevent) - - def register_hook(self, function, event, etype=''): - """register a function to call when occurs - - is an entity/relation type or an empty string. - - If etype is the empty string, the function will be called at each event, - else the function will be called only when event occurs on an entity or - relation of the given type. - """ - assert event in ALL_HOOKS, '%r NOT IN %r' % (event, ALL_HOOKS) - assert (not event in SYSTEM_HOOKS or not etype), (event, etype) - etype = etype or '' - try: - self._hooks[event][etype].append(function) - self.debug('registered hook %s on %s (%s)', event, etype or 'any', - function.func_name) - - except KeyError: - self.error('can\'t register hook %s on %s (%s)', - event, etype or 'any', function.func_name) - - def unregister_hook(self, function_or_cls, event=None, etype=''): - """unregister a function to call when occurs, or a Hook subclass. - In the later case, event/type information are extracted from the given - class. 
- """ - if isinstance(function_or_cls, type) and issubclass(function_or_cls, Hook): - for event, ertype in function_or_cls.register_to(): - for hook in self._hooks[event][ertype]: - if getattr(hook, 'im_self', None).__class__ is function_or_cls: - self._hooks[event][ertype].remove(hook) - self.info('unregister hook %s on %s (%s)', event, etype, - function_or_cls.__name__) - break - else: - self.warning("can't unregister hook %s on %s (%s), not found", - event, etype, function_or_cls.__name__) - else: - assert event in ALL_HOOKS, event - etype = etype or '' - self.info('unregister hook %s on %s (%s)', event, etype, - function_or_cls.func_name) - self._hooks[event][etype].remove(function_or_cls) - - def call_hooks(self, __event, __type='', *args, **kwargs): - """call hook matching event and optional type""" - if __type: - self.info('calling hooks for event %s (%s)', __event, __type) - else: - self.info('calling hooks for event %s', __event) - # call generic hooks first - for hook in self._hooks[__event]['']: - #print '[generic]', hook.__name__ - hook(*args, **kwargs) - if __type: - for hook in self._hooks[__event][__type]: - #print '[%s]'%__type, hook.__name__ - hook(*args, **kwargs) - - def _init_hooks(self, schema): - """initialize the hooks map""" - for hook_event in ENTITIES_HOOKS: - self._hooks[hook_event] = {'': []} - for etype in schema.entities(): - self._hooks[hook_event][etype] = [] - for hook_event in RELATIONS_HOOKS: - self._hooks[hook_event] = {'': []} - for r_type in schema.relations(): - self._hooks[hook_event][r_type] = [] - for hook_event in SYSTEM_HOOKS: - self._hooks[hook_event] = {'': []} - - def register_system_hooks(self, config): - """register system hooks according to the configuration""" - self.info('register core hooks') - from cubicweb.server.hooks import _register_metadata_hooks, _register_wf_hooks - _register_metadata_hooks(self) - self.info('register workflow hooks') - _register_wf_hooks(self) - if config.core_hooks: - from cubicweb.server.hooks import _register_core_hooks - _register_core_hooks(self) - if config.schema_hooks: - from cubicweb.server.schemahooks import _register_schema_hooks - self.info('register schema hooks') - _register_schema_hooks(self) - if config.usergroup_hooks: - from cubicweb.server.hooks import _register_usergroup_hooks - from cubicweb.server.hooks import _register_eproperty_hooks - self.info('register user/group hooks') - _register_usergroup_hooks(self) - _register_eproperty_hooks(self) - if config.security_hooks: - from cubicweb.server.securityhooks import register_security_hooks - self.info('register security hooks') - register_security_hooks(self) - if not self.verification_hooks_activated: - self.deactivate_verification_hooks() - - def deactivate_verification_hooks(self): - from cubicweb.server.hooks import (cardinalitycheck_after_add_entity, - cardinalitycheck_before_del_relation, - cstrcheck_after_add_relation, - uniquecstrcheck_before_modification) - self.warning('deactivating verification hooks') - self.verification_hooks_activated = False - self.unregister_hook(cardinalitycheck_after_add_entity, 'after_add_entity', '') - self.unregister_hook(cardinalitycheck_before_del_relation, 'before_delete_relation', '') - self.unregister_hook(cstrcheck_after_add_relation, 'after_add_relation', '') - self.unregister_hook(uniquecstrcheck_before_modification, 'before_add_entity', '') - self.unregister_hook(uniquecstrcheck_before_modification, 'before_update_entity', '') -# self.unregister_hook(tidy_html_fields('before_add_entity'), 
'before_add_entity', '') -# self.unregister_hook(tidy_html_fields('before_update_entity'), 'before_update_entity', '') - - def reactivate_verification_hooks(self): - from cubicweb.server.hooks import (cardinalitycheck_after_add_entity, - cardinalitycheck_before_del_relation, - cstrcheck_after_add_relation, - uniquecstrcheck_before_modification) - self.warning('reactivating verification hooks') - self.verification_hooks_activated = True - self.register_hook(cardinalitycheck_after_add_entity, 'after_add_entity', '') - self.register_hook(cardinalitycheck_before_del_relation, 'before_delete_relation', '') - self.register_hook(cstrcheck_after_add_relation, 'after_add_relation', '') - self.register_hook(uniquecstrcheck_before_modification, 'before_add_entity', '') - self.register_hook(uniquecstrcheck_before_modification, 'before_update_entity', '') -# self.register_hook(tidy_html_fields('before_add_entity'), 'before_add_entity', '') -# self.register_hook(tidy_html_fields('before_update_entity'), 'before_update_entity', '') - -from cubicweb.selectors import yes -from cubicweb.appobject import AppObject - -class autoid(type): - """metaclass to create an unique 'id' attribute on the class using it""" - # XXX is this metaclass really necessary ? - def __new__(mcs, name, bases, classdict): - cls = super(autoid, mcs).__new__(mcs, name, bases, classdict) - cls.id = str(id(cls)) - return cls - -class Hook(AppObject): - __metaclass__ = autoid - __registry__ = 'hooks' - __select__ = yes() - # set this in derivated classes - events = None - accepts = None - enabled = True - - def __init__(self, event=None): - super(Hook, self).__init__() - self.event = event - - @classmethod - def registered(cls, vreg): - super(Hook, cls).registered(vreg) - return cls() - - @classmethod - def register_to(cls): - if not cls.enabled: - cls.warning('%s hook has been disabled', cls) - return - done = set() - assert isinstance(cls.events, (tuple, list)), \ - '%s: events is expected to be a tuple, not %s' % ( - cls, type(cls.events)) - for event in cls.events: - if event in SYSTEM_HOOKS: - assert not cls.accepts or cls.accepts == ('Any',), \ - '%s doesnt make sense on %s' % (cls.accepts, event) - cls.accepts = ('Any',) - for ertype in cls.accepts: - if (event, ertype) in done: - continue - yield event, ertype - done.add((event, ertype)) - try: - eschema = cls.schema.eschema(ertype) - except KeyError: - # relation schema - pass - else: - for eetype in eschema.specialized_by(): - if (event, eetype) in done: - continue - yield event, str(eetype) - done.add((event, eetype)) - - - def make_callback(self, event): - if len(self.events) == 1: - return self.call - return self.__class__(event=event).call - - def call(self): - raise NotImplementedError - -class SystemHook(Hook): - accepts = () - -from logging import getLogger -from cubicweb import set_log_methods -set_log_methods(HooksManager, getLogger('cubicweb.hooksmanager')) -set_log_methods(Hook, getLogger('cubicweb.hooks')) - -# base classes for relation propagation ######################################## - -from cubicweb.server.pool import PreCommitOperation - - -class PropagateSubjectRelationHook(Hook): - """propagate permissions and nosy list when new entity are added""" - events = ('after_add_relation',) - # to set in concrete class - rtype = None - subject_relations = None - object_relations = None - accepts = None # subject_relations + object_relations - - def call(self, session, fromeid, rtype, toeid): - for eid in (fromeid, toeid): - etype = session.describe(eid)[0] - if 
self.rtype not in self.schema.eschema(etype).subjrels: - return - if rtype in self.subject_relations: - meid, seid = fromeid, toeid - else: - assert rtype in self.object_relations - meid, seid = toeid, fromeid - session.unsafe_execute( - 'SET E %s P WHERE X %s P, X eid %%(x)s, E eid %%(e)s, NOT E %s P'\ - % (self.rtype, self.rtype, self.rtype), - {'x': meid, 'e': seid}, ('x', 'e')) - - -class PropagateSubjectRelationAddHook(Hook): - """propagate on existing entities when a permission or nosy list is added""" - events = ('after_add_relation',) - # to set in concrete class - rtype = None - subject_relations = None - object_relations = None - accepts = None # (self.rtype,) - - def call(self, session, fromeid, rtype, toeid): - eschema = self.schema.eschema(session.describe(fromeid)[0]) - execute = session.unsafe_execute - for rel in self.subject_relations: - if rel in eschema.subjrels: - execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' - 'X %s R, NOT R %s P' % (rtype, rel, rtype), - {'x': fromeid, 'p': toeid}, 'x') - for rel in self.object_relations: - if rel in eschema.objrels: - execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' - 'R %s X, NOT R %s P' % (rtype, rel, rtype), - {'x': fromeid, 'p': toeid}, 'x') - - -class PropagateSubjectRelationDelHook(Hook): - """propagate on existing entities when a permission is deleted""" - events = ('after_delete_relation',) - # to set in concrete class - rtype = None - subject_relations = None - object_relations = None - accepts = None # (self.rtype,) - - def call(self, session, fromeid, rtype, toeid): - eschema = self.schema.eschema(session.describe(fromeid)[0]) - execute = session.unsafe_execute - for rel in self.subject_relations: - if rel in eschema.subjrels: - execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' - 'X %s R' % (rtype, rel), - {'x': fromeid, 'p': toeid}, 'x') - for rel in self.object_relations: - if rel in eschema.objrels: - execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' - 'R %s X' % (rtype, rel), - {'x': fromeid, 'p': toeid}, 'x') +from logilab.common.deprecation import class_renamed, class_moved +from cubicweb.server import hook +SystemHook = class_renamed('SystemHook', hook.Hook) +PropagateSubjectRelationHook = class_renamed('PropagateSubjectRelationHook', + hook.PropagateSubjectRelationHook) +PropagateSubjectRelationAddHook = class_renamed('PropagateSubjectRelationAddHook', + hook.PropagateSubjectRelationAddHook) +PropagateSubjectRelationDelHook = class_renamed('PropagateSubjectRelationDelHook', + hook.PropagateSubjectRelationDelHook) +Hook = class_moved(hook.Hook) diff -r 15d541321a8c -r 74c1597f8a82 server/migractions.py --- a/server/migractions.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/migractions.py Wed Jan 20 10:13:45 2010 +0100 @@ -38,7 +38,7 @@ from cubicweb.schema import (META_RTYPES, VIRTUAL_RTYPES, CubicWebRelationSchema, order_eschemas) from cubicweb.dbapi import get_repository, repo_connect -from cubicweb.common.migration import MigrationHelper, yes +from cubicweb.migration import MigrationHelper, yes try: from cubicweb.server import SOURCE_TYPES, schemaserial as ss @@ -221,7 +221,7 @@ login, pwd = manager_userpasswd() while True: try: - self._cnx = repo_connect(self.repo, login, pwd) + self._cnx = repo_connect(self.repo, login, password=pwd) if not 'managers' in self._cnx.user(self.session).groups: print 'migration need an account in the managers group' else: @@ -263,7 +263,8 @@ def _create_context(self): """return a dictionary to use as migration script execution context""" context = 
super(ServerMigrationHelper, self)._create_context() - context.update({'checkpoint': self.checkpoint, + context.update({'commit': self.checkpoint, + 'checkpoint': deprecated('[3.6] use commit')(self.checkpoint), 'sql': self.sqlexec, 'rql': self.rqlexec, 'rqliter': self.rqliter, @@ -272,9 +273,9 @@ 'fsschema': self.fs_schema, 'session' : self.session, 'repo' : self.repo, - 'synchronize_schema': deprecated()(self.cmd_sync_schema_props_perms), - 'synchronize_eschema': deprecated()(self.cmd_sync_schema_props_perms), - 'synchronize_rschema': deprecated()(self.cmd_sync_schema_props_perms), + 'synchronize_schema': deprecated()(self.cmd_sync_schema_props_perms), # 3.4 + 'synchronize_eschema': deprecated()(self.cmd_sync_schema_props_perms), # 3.4 + 'synchronize_rschema': deprecated()(self.cmd_sync_schema_props_perms), # 3.4 }) return context @@ -294,7 +295,7 @@ from cubicweb.server.hooks import setowner_after_add_entity self.repo.hm.unregister_hook(setowner_after_add_entity, 'after_add_entity', '') - self.deactivate_verification_hooks() + self.cmd_deactivate_verification_hooks() self.info('executing %s', apc) confirm = self.confirm execscript_confirm = self.execscript_confirm @@ -308,7 +309,7 @@ if self.config.free_wheel: self.repo.hm.register_hook(setowner_after_add_entity, 'after_add_entity', '') - self.reactivate_verification_hooks() + self.cmd_reactivate_verification_hooks() def install_custom_sql_scripts(self, directory, driver): self.session.set_pool() # ensure pool is set @@ -327,35 +328,31 @@ # schema synchronization internals ######################################## - def _synchronize_permissions(self, ertype): + def _synchronize_permissions(self, erschema, teid): """permission synchronization for an entity or relation type""" - if ertype in VIRTUAL_RTYPES: - return - newrschema = self.fs_schema[ertype] - teid = self.repo.schema[ertype].eid - if 'update' in newrschema.ACTIONS or newrschema.final: + assert teid, erschema + if 'update' in erschema.ACTIONS or erschema.final: # entity type exprtype = u'ERQLExpression' else: # relation type exprtype = u'RRQLExpression' - assert teid, ertype gm = self.group_mapping() confirm = self.verbosity >= 2 # * remove possibly deprecated permission (eg in the persistent schema # but not in the new schema) # * synchronize existing expressions # * add new groups/expressions - for action in newrschema.ACTIONS: + for action in erschema.ACTIONS: perm = '%s_permission' % action # handle groups - newgroups = list(newrschema.get_groups(action)) + newgroups = list(erschema.get_groups(action)) for geid, gname in self.rqlexec('Any G, GN WHERE T %s G, G name GN, ' 'T eid %%(x)s' % perm, {'x': teid}, 'x', ask_confirm=False): if not gname in newgroups: if not confirm or self.confirm('remove %s permission of %s to %s?' - % (action, ertype, gname)): + % (action, erschema, gname)): self.rqlexec('DELETE T %s G WHERE G eid %%(x)s, T eid %s' % (perm, teid), {'x': geid}, 'x', ask_confirm=False) @@ -363,18 +360,18 @@ newgroups.remove(gname) for gname in newgroups: if not confirm or self.confirm('grant %s permission of %s to %s?' 
- % (action, ertype, gname)): + % (action, erschema, gname)): self.rqlexec('SET T %s G WHERE G eid %%(x)s, T eid %s' % (perm, teid), {'x': gm[gname]}, 'x', ask_confirm=False) # handle rql expressions - newexprs = dict((expr.expression, expr) for expr in newrschema.get_rqlexprs(action)) + newexprs = dict((expr.expression, expr) for expr in erschema.get_rqlexprs(action)) for expreid, expression in self.rqlexec('Any E, EX WHERE T %s E, E expression EX, ' 'T eid %s' % (perm, teid), ask_confirm=False): if not expression in newexprs: if not confirm or self.confirm('remove %s expression for %s permission of %s?' - % (expression, action, ertype)): + % (expression, action, erschema)): # deleting the relation will delete the expression entity self.rqlexec('DELETE T %s E WHERE E eid %%(x)s, T eid %s' % (perm, teid), @@ -384,7 +381,7 @@ for expression in newexprs.values(): expr = expression.expression if not confirm or self.confirm('add %s expression for %s permission of %s?' - % (expr, action, ertype)): + % (expr, action, erschema)): self.rqlexec('INSERT RQLExpression X: X exprtype %%(exprtype)s, ' 'X expression %%(expr)s, X mainvars %%(vars)s, T %s X ' 'WHERE T eid %%(x)s' % perm, @@ -392,7 +389,7 @@ 'vars': expression.mainvars, 'x': teid}, 'x', ask_confirm=False) - def _synchronize_rschema(self, rtype, syncrdefs=True, syncperms=True): + def _synchronize_rschema(self, rtype, syncrdefs=True, syncperms=True, syncprops=True): """synchronize properties of the persistent relation schema against its current definition: @@ -409,16 +406,17 @@ return self._synchronized.add(rtype) rschema = self.fs_schema.rschema(rtype) - self.rqlexecall(ss.updaterschema2rql(rschema), - ask_confirm=self.verbosity>=2) - reporschema = self.repo.schema.rschema(rtype) + if syncprops: + self.rqlexecall(ss.updaterschema2rql(rschema), + ask_confirm=self.verbosity>=2) if syncrdefs: - for subj, obj in rschema.iter_rdefs(): - if not reporschema.has_rdef(subj, obj): + reporschema = self.repo.schema.rschema(rtype) + for subj, obj in rschema.rdefs: + if (subj, obj) not in reporschema.rdefs: continue - self._synchronize_rdef_schema(subj, rschema, obj) - if syncperms: - self._synchronize_permissions(rtype) + self._synchronize_rdef_schema(subj, rschema, obj, + syncprops=syncprops, + syncperms=syncperms) def _synchronize_eschema(self, etype, syncperms=True): """synchronize properties of the persistent entity schema against @@ -464,13 +462,14 @@ reporschema = self.repo.schema.rschema(rschema) for subj in subjtypes: for obj in objtypes: - if not reporschema.has_rdef(subj, obj): + if (subj, obj) not in reporschema.rdefs: continue self._synchronize_rdef_schema(subj, rschema, obj) if syncperms: - self._synchronize_permissions(etype) + self._synchronize_permissions(eschema, repoeschema.eid) - def _synchronize_rdef_schema(self, subjtype, rtype, objtype): + def _synchronize_rdef_schema(self, subjtype, rtype, objtype, + syncperms=True, syncprops=True): """synchronize properties of the persistent relation definition schema against its current definition: * order and other properties @@ -485,45 +484,50 @@ if rschema.symetric: self._synchronized.add((objtype, rschema, subjtype)) confirm = self.verbosity >= 2 - # properties - self.rqlexecall(ss.updaterdef2rql(rschema, subjtype, objtype), - ask_confirm=confirm) - # constraints - newconstraints = list(rschema.rproperty(subjtype, objtype, 'constraints')) - # 1. 
remove old constraints and update constraints of the same type - # NOTE: don't use rschema.constraint_by_type because it may be - # out of sync with newconstraints when multiple - # constraints of the same type are used - for cstr in reporschema.rproperty(subjtype, objtype, 'constraints'): + if syncprops: + # properties + self.rqlexecall(ss.updaterdef2rql(rschema, subjtype, objtype), + ask_confirm=confirm) + # constraints + rdef = rschema.rdef(subjtype, objtype) + repordef = reporschema.rdef(subjtype, objtype) + newconstraints = list(rdef.constraints) + # 1. remove old constraints and update constraints of the same type + # NOTE: don't use rschema.constraint_by_type because it may be + # out of sync with newconstraints when multiple + # constraints of the same type are used + for cstr in repordef.constraints: + for newcstr in newconstraints: + if newcstr.type() == cstr.type(): + break + else: + newcstr = None + if newcstr is None: + self.rqlexec('DELETE X constrained_by C WHERE C eid %(x)s', + {'x': cstr.eid}, 'x', + ask_confirm=confirm) + self.rqlexec('DELETE CWConstraint C WHERE C eid %(x)s', + {'x': cstr.eid}, 'x', + ask_confirm=confirm) + else: + newconstraints.remove(newcstr) + values = {'x': cstr.eid, + 'v': unicode(newcstr.serialize())} + self.rqlexec('SET X value %(v)s WHERE X eid %(x)s', + values, 'x', ask_confirm=confirm) + # 2. add new constraints for newcstr in newconstraints: - if newcstr.type() == cstr.type(): - break - else: - newcstr = None - if newcstr is None: - self.rqlexec('DELETE X constrained_by C WHERE C eid %(x)s', - {'x': cstr.eid}, 'x', - ask_confirm=confirm) - self.rqlexec('DELETE CWConstraint C WHERE C eid %(x)s', - {'x': cstr.eid}, 'x', - ask_confirm=confirm) - else: - newconstraints.remove(newcstr) - values = {'x': cstr.eid, - 'v': unicode(newcstr.serialize())} - self.rqlexec('SET X value %(v)s WHERE X eid %(x)s', - values, 'x', ask_confirm=confirm) - # 2. 
add new constraints - for newcstr in newconstraints: - self.rqlexecall(ss.constraint2rql(rschema, subjtype, objtype, - newcstr), - ask_confirm=confirm) + self.rqlexecall(ss.constraint2rql(rschema, subjtype, objtype, + newcstr), + ask_confirm=confirm) + if syncperms and not rschema in VIRTUAL_RTYPES: + self._synchronize_permissions(rdef, repordef.eid) # base actions ############################################################ - def checkpoint(self): + def checkpoint(self, ask_confirm=True): """checkpoint action""" - if self.confirm('commit now ?', shell=False): + if not ask_confirm or self.confirm('commit now ?', shell=False): self.commit() def cmd_add_cube(self, cube, update_database=True): @@ -579,8 +583,8 @@ # check if attributes has been added to existing entities for rschema in newcubes_schema.relations(): existingschema = self.repo.schema.rschema(rschema.type) - for (fromtype, totype) in rschema.iter_rdefs(): - if existingschema.has_rdef(fromtype, totype): + for (fromtype, totype) in rschema.rdefs: + if (fromtype, totype) in existingschema.rdefs: continue # check we should actually add the relation definition if not (fromtype in new or totype in new or rschema in new): @@ -616,9 +620,9 @@ if rschema in removedcubes_schema and rschema in reposchema: # check if attributes/relations has been added to entities from # other cubes - for fromtype, totype in rschema.iter_rdefs(): - if not removedcubes_schema[rschema.type].has_rdef(fromtype, totype) and \ - reposchema[rschema.type].has_rdef(fromtype, totype): + for fromtype, totype in rschema.rdefs: + if (fromtype, totype) not in removedcubes_schema[rschema.type].rdefs and \ + (fromtype, totype) in reposchema[rschema.type].rdefs: self.cmd_drop_relation_definition( str(fromtype), rschema.type, str(totype)) # execute post-remove files @@ -685,13 +689,11 @@ else: eschema = self.fs_schema.eschema(etype) confirm = self.verbosity >= 2 + groupmap = self.group_mapping() # register the entity into CWEType - self.rqlexecall(ss.eschema2rql(eschema), ask_confirm=confirm) + self.rqlexecall(ss.eschema2rql(eschema, groupmap), ask_confirm=confirm) # add specializes relation if needed self.rqlexecall(ss.eschemaspecialize2rql(eschema), ask_confirm=confirm) - # register groups / permissions for the entity - self.rqlexecall(ss.erperms2rql(eschema, self.group_mapping()), - ask_confirm=confirm) # register entity's attributes for rschema, attrschema in eschema.attribute_definitions(): # ignore those meta relations, they will be automatically added @@ -702,7 +704,8 @@ # actually in the schema self.cmd_add_relation_type(rschema.type, False, commit=True) # register relation definition - self.rqlexecall(ss.rdef2rql(rschema, etype, attrschema.type), + self.rqlexecall(ss.rdef2rql(rschema, etype, attrschema.type, + groupmap=groupmap), ask_confirm=confirm) # take care to newly introduced base class # XXX some part of this should probably be under the "if auto" block @@ -714,8 +717,8 @@ continue if instspschema.specializes() != eschema: self.rqlexec('SET D specializes P WHERE D eid %(d)s, P name %(pn)s', - {'d': instspschema.eid, - 'pn': eschema.type}, ask_confirm=confirm) + {'d': instspschema.eid, + 'pn': eschema.type}, ask_confirm=confirm) for rschema, tschemas, role in spschema.relation_definitions(True): for tschema in tschemas: if not tschema in instschema: @@ -763,7 +766,8 @@ # remember this two avoid adding twice non symetric relation # such as "Emailthread forked_from Emailthread" added.append((etype, rschema.type, targettype)) - self.rqlexecall(ss.rdef2rql(rschema, 
etype, targettype), + self.rqlexecall(ss.rdef2rql(rschema, etype, targettype, + groupmap=groupmap), ask_confirm=confirm) for rschema in eschema.object_relations(): rtypeadded = rschema.type in instschema or rschema.type in added @@ -783,7 +787,8 @@ elif (targettype, rschema.type, etype) in added: continue # register relation definition - self.rqlexecall(ss.rdef2rql(rschema, targettype, etype), + self.rqlexecall(ss.rdef2rql(rschema, targettype, etype, + groupmap=groupmap), ask_confirm=confirm) if commit: self.commit() @@ -828,12 +833,9 @@ # definitions self.rqlexecall(ss.rschema2rql(rschema, addrdef=False), ask_confirm=self.verbosity>=2) - # register groups / permissions for the relation - self.rqlexecall(ss.erperms2rql(rschema, self.group_mapping()), - ask_confirm=self.verbosity>=2) if addrdef: self.commit() - self.rqlexecall(ss.rdef2rql(rschema), + self.rqlexecall(ss.rdef2rql(rschema, groupmap=self.group_mapping()), ask_confirm=self.verbosity>=2) if rtype in META_RTYPES: # if the relation is in META_RTYPES, ensure we're adding it for @@ -848,7 +850,8 @@ props = rschema.rproperties( rschema.subjects(objtype)[0], objtype) assert props - self.rqlexecall(ss.rdef2rql(rschema, etype, objtype, props), + self.rqlexecall(ss.rdef2rql(rschema, etype, objtype, props, + groupmap=self.group_mapping()), ask_confirm=self.verbosity>=2) if commit: @@ -880,7 +883,8 @@ rschema = self.fs_schema.rschema(rtype) if not rtype in self.repo.schema: self.cmd_add_relation_type(rtype, addrdef=False, commit=True) - self.rqlexecall(ss.rdef2rql(rschema, subjtype, objtype), + self.rqlexecall(ss.rdef2rql(rschema, subjtype, objtype, + groupmap=self.group_mapping()), ask_confirm=self.verbosity>=2) if commit: self.commit() @@ -913,22 +917,25 @@ if isinstance(ertype, (tuple, list)): assert len(ertype) == 3, 'not a relation definition' assert syncprops, 'can\'t update permission for a relation definition' - self._synchronize_rdef_schema(*ertype) - elif syncprops: + self._synchronize_rdef_schema(ertype[0], ertype[1], ertype[2], + syncperms=syncperms, + syncprops=syncprops) + else: erschema = self.repo.schema[ertype] if isinstance(erschema, CubicWebRelationSchema): self._synchronize_rschema(erschema, syncperms=syncperms, + syncprops=syncprops, syncrdefs=syncrdefs) - else: + elif syncprops: self._synchronize_eschema(erschema, syncperms=syncperms) - else: - self._synchronize_permissions(ertype) + else: + self._synchronize_permissions(self.fs_schema[ertype], erschema.eid) else: for etype in self.repo.schema.entities(): if syncprops: self._synchronize_eschema(etype, syncperms=syncperms) else: - self._synchronize_permissions(etype) + self._synchronize_permissions(self.fs_schema[etype], erschema.eid) if commit: self.commit() @@ -1171,10 +1178,10 @@ return ForRqlIterator(self, rql, None, ask_confirm) def cmd_deactivate_verification_hooks(self): - self.repo.hm.deactivate_verification_hooks() + self.config.disabled_hooks_categories.add('integrity') def cmd_reactivate_verification_hooks(self): - self.repo.hm.reactivate_verification_hooks() + self.config.disabled_hooks_categories.remove('integrity') # broken db commands ###################################################### diff -r 15d541321a8c -r 74c1597f8a82 server/pool.py --- a/server/pool.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/pool.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,13 +1,7 @@ -"""CubicWeb server connections pool : - -* the rql repository has a limited number of connections pools, each of them - dealing with a set of connections on each source used by the repository - -* 
operation may be registered by hooks during a transaction, which will be - fired when the pool is commited or rollbacked - -This module defined the `ConnectionsPool` class and a set of abstract classes -for operation. +"""CubicWeb server connections pool : the repository has a limited number of +connections pools, each of them dealing with a set of connections on each source +used by the repository. A connections pools (`ConnectionsPool`) is an +abstraction for a group of connection to each source. :organization: Logilab @@ -130,163 +124,11 @@ self._cursors.pop(source.uri, None) -class Operation(object): - """an operation is triggered on connections pool events related to - commit / rollback transations. Possible events are: - - precommit: - the pool is preparing to commit. You shouldn't do anything things which - has to be reverted if the commit fail at this point, but you can freely - do any heavy computation or raise an exception if the commit can't go. - You can add some new operation during this phase but their precommit - event won't be triggered - - commit: - the pool is preparing to commit. You should avoid to do to expensive - stuff or something that may cause an exception in this event - - revertcommit: - if an operation failed while commited, this event is triggered for - all operations which had their commit event already to let them - revert things (including the operation which made fail the commit) - - rollback: - the transaction has been either rollbacked either - * intentionaly - * a precommit event failed, all operations are rollbacked - * a commit event failed, all operations which are not been triggered for - commit are rollbacked - - order of operations may be important, and is controlled according to: - * operation's class - """ - - def __init__(self, session, **kwargs): - self.session = session - self.user = session.user - self.repo = session.repo - self.schema = session.repo.schema - self.config = session.repo.config - self.__dict__.update(kwargs) - self.register(session) - # execution information - self.processed = None # 'precommit', 'commit' - self.failed = False - - def register(self, session): - session.add_operation(self, self.insert_index()) - - def insert_index(self): - """return the index of the lastest instance which is not a - LateOperation instance - """ - # faster by inspecting operation in reverse order for heavy transactions - i = None - for i, op in enumerate(reversed(self.session.pending_operations)): - if isinstance(op, (LateOperation, SingleLastOperation)): - continue - return -i or None - if i is None: - return None - return -(i + 1) - - def handle_event(self, event): - """delegate event handling to the operation""" - getattr(self, event)() - - def precommit_event(self): - """the observed connections pool is preparing a commit""" - - def revertprecommit_event(self): - """an error went when pre-commiting this operation or a later one - - should revert pre-commit's changes but take care, they may have not - been all considered if it's this operation which failed - """ - - def commit_event(self): - """the observed connections pool is commiting""" - raise NotImplementedError() - - def revertcommit_event(self): - """an error went when commiting this operation or a later one - - should revert commit's changes but take care, they may have not - been all considered if it's this operation which failed - """ - - def rollback_event(self): - """the observed connections pool has been rollbacked - - do nothing by default, the operation will just be removed 
from the pool - operation list - """ - - def postcommit_event(self): - """the observed connections pool has committed""" - - -class PreCommitOperation(Operation): - """base class for operation only defining a precommit operation - """ - - def precommit_event(self): - """the observed connections pool is preparing a commit""" - raise NotImplementedError() - - def commit_event(self): - """the observed connections pool is commiting""" - - -class LateOperation(Operation): - """special operation which should be called after all possible (ie non late) - operations - """ - def insert_index(self): - """return the index of the lastest instance which is not a - SingleLastOperation instance - """ - # faster by inspecting operation in reverse order for heavy transactions - i = None - for i, op in enumerate(reversed(self.session.pending_operations)): - if isinstance(op, SingleLastOperation): - continue - return -i or None - if i is None: - return None - return -(i + 1) - - -class SingleOperation(Operation): - """special operation which should be called once""" - def register(self, session): - """override register to handle cases where this operation has already - been added - """ - operations = session.pending_operations - index = self.equivalent_index(operations) - if index is not None: - equivalent = operations.pop(index) - else: - equivalent = None - session.add_operation(self, self.insert_index()) - return equivalent - - def equivalent_index(self, operations): - """return the index of the equivalent operation if any""" - for i, op in enumerate(reversed(operations)): - if op.__class__ is self.__class__: - return -(i+1) - return None - - -class SingleLastOperation(SingleOperation): - """special operation which should be called once and after all other - operations - """ - def insert_index(self): - return None - -from logging import getLogger -from cubicweb import set_log_methods -set_log_methods(Operation, getLogger('cubicweb.session')) +from cubicweb.server.hook import (Operation, LateOperation, SingleOperation, + SingleLastOperation) +from logilab.common.deprecation import class_moved, class_renamed +Operation = class_moved(Operation) +PreCommitOperation = class_renamed('PreCommitOperation', Operation) +LateOperation = class_moved(LateOperation) +SingleOperation = class_moved(SingleOperation) +SingleLastOperation = class_moved(SingleLastOperation) diff -r 15d541321a8c -r 74c1597f8a82 server/querier.py --- a/server/querier.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/querier.py Wed Jan 20 10:13:45 2010 +0100 @@ -14,7 +14,7 @@ from logilab.common.compat import any from rql import RQLHelper, RQLSyntaxError from rql.stmts import Union, Select -from rql.nodes import (Relation, VariableRef, Constant, SubQuery) +from rql.nodes import Relation, VariableRef, Constant, SubQuery from cubicweb import Unauthorized, QueryError, UnknownEid, typed_eid from cubicweb import server @@ -71,14 +71,23 @@ # XXX has_text may have specific perm ? 
if rel.r_type in READ_ONLY_RTYPES: continue - if not schema.rschema(rel.r_type).has_access(user, 'read'): + rschema = schema.rschema(rel.r_type) + if rschema.final: + eschema = schema.eschema(solution[rel.children[0].name]) + rdef = eschema.rdef(rschema) + else: + rdef = rschema.rdef(solution[rel.children[0].name], + solution[rel.children[1].children[0].name]) + if not user.matching_groups(rdef.get_groups('read')): raise Unauthorized('read', rel.r_type) localchecks = {} # iterate on defined_vars and not on solutions to ignore column aliases for varname in rqlst.defined_vars: etype = solution[varname] eschema = schema.eschema(etype) - if not eschema.has_access(user, 'read'): + if eschema.final: + continue + if not user.matching_groups(eschema.get_groups('read')): erqlexprs = eschema.get_rqlexprs('read') if not erqlexprs: ex = Unauthorized('read', etype) diff -r 15d541321a8c -r 74c1597f8a82 server/repository.py --- a/server/repository.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/repository.py Wed Jan 20 10:13:45 2010 +0100 @@ -34,19 +34,12 @@ ETypeNotSupportedBySources, MultiSourcesError, BadConnectionId, Unauthorized, ValidationError, typed_eid) -from cubicweb.cwvreg import CubicWebVRegistry -from cubicweb.schema import VIRTUAL_RTYPES, CubicWebSchema -from cubicweb import server -from cubicweb.server.utils import RepoThread, LoopTask -from cubicweb.server.pool import ConnectionsPool, LateOperation, SingleLastOperation +from cubicweb import cwvreg, schema, server +from cubicweb.server import utils, hook, pool, querier, sources from cubicweb.server.session import Session, InternalSession -from cubicweb.server.querier import QuerierHelper -from cubicweb.server.sources import get_source -from cubicweb.server.hooksmanager import HooksManager -from cubicweb.server.hookhelper import rproperty -class CleanupEidTypeCacheOp(SingleLastOperation): +class CleanupEidTypeCacheOp(hook.SingleLastOperation): """on rollback of a insert query or commit of delete query, we have to clear repository's cache from no more valid entries @@ -62,7 +55,8 @@ remove inserted eid from repository type/source cache """ try: - self.repo.clear_caches(self.session.transaction_data['pendingeids']) + self.session.repo.clear_caches( + self.session.transaction_data['pendingeids']) except KeyError: pass @@ -71,12 +65,13 @@ remove inserted eid from repository type/source cache """ try: - self.repo.clear_caches(self.session.transaction_data['neweids']) + self.session.repo.clear_caches( + self.session.transaction_data['neweids']) except KeyError: pass -class FTIndexEntityOp(LateOperation): +class FTIndexEntityOp(hook.LateOperation): """operation to delay entity full text indexation to commit since fti indexing may trigger discovery of other entities, it should be @@ -114,7 +109,7 @@ def ensure_card_respected(execute, session, eidfrom, rtype, eidto): - card = rproperty(session, rtype, eidfrom, eidto, 'cardinality') + card = session.schema_rproperty(rtype, eidfrom, eidto, 'cardinality') # one may be tented to check for neweids but this may cause more than one # relation even with '1?' cardinality if thoses relations are added in the # same transaction where the entity is being created. 
This never occurs from @@ -141,7 +136,7 @@ def __init__(self, config, vreg=None, debug=False): self.config = config if vreg is None: - vreg = CubicWebVRegistry(config, debug) + vreg = cwvreg.CubicWebVRegistry(config, debug) self.vreg = vreg self.pyro_registered = False self.info('starting repository from %s', self.config.apphome) @@ -152,10 +147,10 @@ # list of running threads self._running_threads = [] # initial schema, should be build or replaced latter - self.schema = CubicWebSchema(config.appid) + self.schema = schema.CubicWebSchema(config.appid) self.vreg.schema = self.schema # until actual schema is loaded... # querier helper, need to be created after sources initialization - self.querier = QuerierHelper(self, self.schema) + self.querier = querier.QuerierHelper(self, self.schema) # should we reindex in changes? self.do_fti = not config['delay-full-text-indexation'] # sources @@ -178,16 +173,21 @@ self._type_source_cache = {} # cache (extid, source uri) -> eid self._extid_cache = {} - # create the hooks manager - self.hm = HooksManager(self.schema) # open some connections pools if config.open_connections_pools: self.open_connections_pools() + def _boostrap_hook_registry(self): + """called during bootstrap since we need the metadata hooks""" + hooksdirectory = join(CW_SOFTWARE_ROOT, 'hooks') + self.vreg.init_registration([hooksdirectory]) + self.vreg.load_file(join(hooksdirectory, 'metadata.py'), + 'cubicweb.hooks.metadata') + def open_connections_pools(self): config = self.config self._available_pools = Queue.Queue() - self._available_pools.put_nowait(ConnectionsPool(self.sources)) + self._available_pools.put_nowait(pool.ConnectionsPool(self.sources)) if config.read_instance_schema: # normal start: load the instance schema from the database self.fill_schema() @@ -195,16 +195,14 @@ # usually during repository creation self.warning("set fs instance'schema as bootstrap schema") config.bootstrap_cubes() - self.set_bootstrap_schema(config.load_schema()) + self.set_schema(config.load_schema(), resetvreg=False) # need to load the Any and CWUser entity types etdirectory = join(CW_SOFTWARE_ROOT, 'entities') self.vreg.init_registration([etdirectory]) - self.vreg.load_file(join(etdirectory, '__init__.py'), - 'cubicweb.entities.__init__') - self.vreg.load_file(join(etdirectory, 'authobjs.py'), - 'cubicweb.entities.authobjs') - self.vreg.load_file(join(etdirectory, 'wfobjs.py'), - 'cubicweb.entities.wfobjs') + for modname in ('__init__', 'authobjs', 'wfobjs'): + self.vreg.load_file(join(etdirectory, '%s.py' % modname), + 'cubicweb.entities.%s' % modname) + self._boostrap_hook_registry() else: # test start: use the file system schema (quicker) self.warning("set fs instance'schema") @@ -230,27 +228,29 @@ # list of available pools (we can't iterated on Queue instance) self.pools = [] for i in xrange(config['connections-pool-size']): - self.pools.append(ConnectionsPool(self.sources)) + self.pools.append(pool.ConnectionsPool(self.sources)) self._available_pools.put_nowait(self.pools[-1]) self._shutting_down = False + self.hm = self.vreg['hooks'] if not (config.creating or config.repairing): # call instance level initialisation hooks self.hm.call_hooks('server_startup', repo=self) # register a task to cleanup expired session self.looping_task(config['session-time']/3., self.clean_sessions) - CW_EVENT_MANAGER.bind('after-registry-reload', self.reset_hooks) # internals ############################################################### def get_source(self, uri, source_config): source_config['uri'] = uri - 
return get_source(source_config, self.schema, self) + return sources.get_source(source_config, self.schema, self) def set_schema(self, schema, resetvreg=True, rebuildinfered=True): if rebuildinfered: schema.rebuild_infered_relations() self.info('set schema %s %#x', schema.name, id(schema)) if resetvreg: + if self.config._cubes is None: + self.config.init_cubes(self.get_cubes()) # full reload of all appobjects self.vreg.reset() self.vreg.set_schema(schema) @@ -260,22 +260,13 @@ for source in self.sources: source.set_schema(schema) self.schema = schema - self.reset_hooks() - - def reset_hooks(self): - self.hm.set_schema(self.schema) - self.hm.register_system_hooks(self.config) - # instance specific hooks - if self.config.instance_hooks: - self.info('loading instance hooks') - self.hm.register_hooks(self.config.load_hooks(self.vreg)) def fill_schema(self): """lod schema from the repository""" from cubicweb.server.schemaserial import deserialize_schema self.info('loading schema from the repository') - appschema = CubicWebSchema(self.config.appid) - self.set_bootstrap_schema(self.config.load_bootstrap_schema()) + appschema = schema.CubicWebSchema(self.config.appid) + self.set_schema(self.config.load_bootstrap_schema(), resetvreg=False) self.debug('deserializing db schema into %s %#x', appschema.name, id(appschema)) session = self.internal_session() try: @@ -289,44 +280,14 @@ raise Exception('Is the database initialised ? (cause: %s)' % (ex.args and ex.args[0].strip() or 'unknown')), \ None, sys.exc_info()[-1] - self.info('set the actual schema') - # XXX have to do this since CWProperty isn't in the bootstrap schema - # it'll be redone in set_schema - self.set_bootstrap_schema(appschema) - # 2.49 migration - if exists(join(self.config.apphome, 'vc.conf')): - session.set_pool() - if not 'template' in file(join(self.config.apphome, 'vc.conf')).read(): - # remaning from cubicweb < 2.38... 
- session.execute('DELETE CWProperty X WHERE X pkey "system.version.template"') - session.commit() finally: session.close() - self.config.init_cubes(self.get_cubes()) self.set_schema(appschema) - def set_bootstrap_schema(self, schema): - """disable hooks when setting a bootstrap schema, but restore - the configuration for the next time - """ - config = self.config - # XXX refactor - config.core_hooks = False - config.usergroup_hooks = False - config.schema_hooks = False - config.notification_hooks = False - config.instance_hooks = False - self.set_schema(schema, resetvreg=False) - config.core_hooks = True - config.usergroup_hooks = True - config.schema_hooks = True - config.notification_hooks = True - config.instance_hooks = True - def start_looping_tasks(self): assert isinstance(self._looping_tasks, list), 'already started' for i, (interval, func, args) in enumerate(self._looping_tasks): - self._looping_tasks[i] = task = LoopTask(interval, func, args) + self._looping_tasks[i] = task = utils.LoopTask(interval, func, args) self.info('starting task %s with interval %.2fs', task.name, interval) task.start() @@ -346,7 +307,7 @@ def threaded_task(self, func): """start function in a separated thread""" - t = RepoThread(func, self._running_threads) + t = utils.RepoThread(func, self._running_threads) t.start() #@locked @@ -421,7 +382,7 @@ session.close() return login - def authenticate_user(self, session, login, password): + def authenticate_user(self, session, login, **kwargs): """validate login / password, raise AuthenticationError on failure return associated CWUser instance on success """ @@ -430,7 +391,7 @@ for source in self.sources: if source.support_entity('CWUser'): try: - eid = source.authenticate(session, login, password) + eid = source.authenticate(session, login, **kwargs) break except AuthenticationError: continue @@ -569,7 +530,7 @@ session.close() return True - def connect(self, login, password, cnxprops=None): + def connect(self, login, **kwargs): """open a connection for a given user base_url may be needed to send mails @@ -581,16 +542,17 @@ # use an internal connection session = self.internal_session() # try to get a user object + cnxprops = kwargs.pop('cnxprops', None) try: - user = self.authenticate_user(session, login, password) + user = self.authenticate_user(session, login, **kwargs) finally: session.close() session = Session(user, self, cnxprops) - user.req = user.rset.req = session + user._cw = user.cw_rset.req = session user.clear_related_cache() self._sessions[session.id] = session self.info('opened %s', session) - self.hm.call_hooks('session_open', session=session) + self.hm.call_hooks('session_open', session) # commit session at this point in case write operation has been done # during `session_open` hooks session.commit() @@ -681,7 +643,7 @@ checkshuttingdown=checkshuttingdown) # operation uncommited before close are rollbacked before hook is called session.rollback() - self.hm.call_hooks('session_close', session=session) + self.hm.call_hooks('session_close', session) # commit session at this point in case write operation has been done # during `session_close` hooks session.commit() @@ -862,11 +824,11 @@ entity = source.before_entity_insertion(session, extid, etype, eid) entity._cw_recreating = True if source.should_call_hooks: - self.hm.call_hooks('before_add_entity', etype, session, entity) + self.hm.call_hooks('before_add_entity', session, entity=entity) # XXX add fti op ? 
source.after_entity_insertion(session, extid, entity) if source.should_call_hooks: - self.hm.call_hooks('after_add_entity', etype, session, entity) + self.hm.call_hooks('after_add_entity', session, entity=entity) if reset_pool: session.reset_pool() return eid @@ -889,16 +851,17 @@ if not hasattr(entity, 'edited_attributes'): entity.edited_attributes = set() if source.should_call_hooks: - self.hm.call_hooks('before_add_entity', etype, session, entity) + entity.edited_attributes = set(entity) + self.hm.call_hooks('before_add_entity', session, entity=entity) # XXX call add_info with complete=False ? self.add_info(session, entity, source, extid) source.after_entity_insertion(session, extid, entity) if source.should_call_hooks: - self.hm.call_hooks('after_add_entity', etype, session, entity) + self.hm.call_hooks('after_add_entity', session, entity=entity) else: # minimal meta-data session.execute('SET X is E WHERE X eid %(x)s, E name %(name)s', - {'x': entity.eid, 'name': entity.id}, 'x') + {'x': entity.eid, 'name': entity.__regid__}, 'x') session.commit(reset_pool) return eid except: @@ -953,7 +916,7 @@ pendingrtypes = session.transaction_data.get('pendingrtypes', ()) for rschema, targetschemas, x in eschema.relation_definitions(): rtype = rschema.type - if rtype in VIRTUAL_RTYPES or rtype in pendingrtypes: + if rtype in schema.VIRTUAL_RTYPES or rtype in pendingrtypes: continue var = '%s%s' % (rtype.upper(), x.upper()) if x == 'subject': @@ -1022,7 +985,7 @@ print 'ADD entity', etype, entity.eid, dict(entity) relations = [] if source.should_call_hooks: - self.hm.call_hooks('before_add_entity', etype, session, entity) + self.hm.call_hooks('before_add_entity', session, entity=entity) # XXX use entity.keys here since edited_attributes is not updated for # inline relations for attr in entity.keys(): @@ -1043,7 +1006,7 @@ session.set_entity_cache(entity) for rschema in eschema.subject_relations(): rtype = str(rschema) - if rtype in VIRTUAL_RTYPES: + if rtype in schema.VIRTUAL_RTYPES: continue if rschema.final: entity.setdefault(rtype, None) @@ -1051,7 +1014,7 @@ entity.set_related_cache(rtype, 'subject', session.empty_rset()) for rschema in eschema.object_relations(): rtype = str(rschema) - if rtype in VIRTUAL_RTYPES: + if rtype in schema.VIRTUAL_RTYPES: continue entity.set_related_cache(rtype, 'object', session.empty_rset()) # set inline relation cache before call to after_add_entity @@ -1059,13 +1022,13 @@ session.update_rel_cache_add(entity.eid, attr, value) # trigger after_add_entity after after_add_relation if source.should_call_hooks: - self.hm.call_hooks('after_add_entity', etype, session, entity) + self.hm.call_hooks('after_add_entity', session, entity=entity) # call hooks for inlined relations for attr, value in relations: - self.hm.call_hooks('before_add_relation', attr, session, - entity.eid, attr, value) - self.hm.call_hooks('after_add_relation', attr, session, - entity.eid, attr, value) + self.hm.call_hooks('before_add_relation', session, + eidfrom=entity.eid, rtype=attr, eidto=value) + self.hm.call_hooks('after_add_relation', session, + eidfrom=entity.eid, rtype=attr, eidto=value) return entity.eid def glob_update_entity(self, session, entity, edited_attributes): @@ -1087,7 +1050,7 @@ continue rschema = eschema.subjrels[attr] if rschema.final: - if eschema.rproperty(attr, 'fulltextindexed'): + if getattr(eschema.rdef(attr), 'fulltextindexed', False): need_fti_update = True only_inline_rels = False else: @@ -1098,19 +1061,18 @@ if previous_value == entity[attr]: previous_value = None 
else: - self.hm.call_hooks('before_delete_relation', attr, - session, entity.eid, attr, - previous_value) + self.hm.call_hooks('before_delete_relation', session, + eidfrom=entity.eid, rtype=attr, + eidto=previous_value) relations.append((attr, entity[attr], previous_value)) source = self.source_from_eid(entity.eid, session) if source.should_call_hooks: # call hooks for inlined relations for attr, value, _ in relations: - self.hm.call_hooks('before_add_relation', attr, session, - entity.eid, attr, value) + self.hm.call_hooks('before_add_relation', session, + eidfrom=entity.eid, rtype=attr, eidto=value) if not only_inline_rels: - self.hm.call_hooks('before_update_entity', etype, session, - entity) + self.hm.call_hooks('before_update_entity', session, entity=entity) source.update_entity(session, entity) if not only_inline_rels: if need_fti_update and self.do_fti: @@ -1118,15 +1080,14 @@ # one indexable attribute FTIndexEntityOp(session, entity=entity) if source.should_call_hooks: - self.hm.call_hooks('after_update_entity', etype, session, - entity) + self.hm.call_hooks('after_update_entity', session, entity=entity) if source.should_call_hooks: for attr, value, prevvalue in relations: # if the relation is already cached, update existant cache relcache = entity.relation_cached(attr, 'subject') if prevvalue is not None: - self.hm.call_hooks('after_delete_relation', attr, session, - entity.eid, attr, prevvalue) + self.hm.call_hooks('after_delete_relation', session, + eidfrom=entity.eid, rtype=attr, eidto=prevvalue) if relcache is not None: session.update_rel_cache_del(entity.eid, attr, prevvalue) del_existing_rel_if_needed(session, entity.eid, attr, value) @@ -1135,8 +1096,8 @@ else: entity.set_related_cache(attr, 'subject', session.eid_rset(value)) - self.hm.call_hooks('after_add_relation', attr, session, - entity.eid, attr, value) + self.hm.call_hooks('after_add_relation', session, + eidfrom=entity.eid, rtype=attr, eidto=value) def glob_delete_entity(self, session, eid): """delete an entity and all related entities from the repository""" @@ -1149,11 +1110,12 @@ server.DEBUG |= (server.DBG_SQL | server.DBG_RQL | server.DBG_MORE) source = self.sources_by_uri[uri] if source.should_call_hooks: - self.hm.call_hooks('before_delete_entity', etype, session, eid) + entity = session.entity_from_eid(eid) + self.hm.call_hooks('before_delete_entity', session, entity=entity) self._delete_info(session, eid) source.delete_entity(session, etype, eid) if source.should_call_hooks: - self.hm.call_hooks('after_delete_entity', etype, session, eid) + self.hm.call_hooks('after_delete_entity', session, entity=entity) # don't clear cache here this is done in a hook on commit def glob_add_relation(self, session, subject, rtype, object): @@ -1163,14 +1125,14 @@ source = self.locate_relation_source(session, subject, rtype, object) if source.should_call_hooks: del_existing_rel_if_needed(session, subject, rtype, object) - self.hm.call_hooks('before_add_relation', rtype, session, - subject, rtype, object) + self.hm.call_hooks('before_add_relation', session, + eidfrom=subject, rtype=rtype, eidto=object) source.add_relation(session, subject, rtype, object) rschema = self.schema.rschema(rtype) session.update_rel_cache_add(subject, rtype, object, rschema.symetric) if source.should_call_hooks: - self.hm.call_hooks('after_add_relation', rtype, session, - subject, rtype, object) + self.hm.call_hooks('after_add_relation', session, + eidfrom=subject, rtype=rtype, eidto=object) def glob_delete_relation(self, session, subject, rtype, 
object): """delete a relation from the repository""" @@ -1178,8 +1140,8 @@ print 'DELETE relation', subject, rtype, object source = self.locate_relation_source(session, subject, rtype, object) if source.should_call_hooks: - self.hm.call_hooks('before_delete_relation', rtype, session, - subject, rtype, object) + self.hm.call_hooks('before_delete_relation', session, + eidfrom=subject, rtype=rtype, eidto=object) source.delete_relation(session, subject, rtype, object) rschema = self.schema.rschema(rtype) session.update_rel_cache_del(subject, rtype, object, rschema.symetric) @@ -1188,8 +1150,8 @@ # stored so try to delete both source.delete_relation(session, object, rtype, subject) if source.should_call_hooks: - self.hm.call_hooks('after_delete_relation', rtype, session, - subject, rtype, object) + self.hm.call_hooks('after_delete_relation', session, + eidfrom=subject, rtype=rtype, eidto=object) # pyro handling ########################################################### diff -r 15d541321a8c -r 74c1597f8a82 server/schemahooks.py --- a/server/schemahooks.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1121 +0,0 @@ -"""schema hooks: - -- synchronize the living schema object with the persistent schema -- perform physical update on the source when necessary - -checking for schema consistency is done in hooks.py - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -from yams.schema import BASE_TYPES -from yams.buildobjs import EntityType, RelationType, RelationDefinition -from yams.schema2sql import eschema2sql, rschema2sql, type_from_constraints - -from logilab.common.decorators import clear_cache - -from cubicweb import ValidationError, RepositoryError -from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, CONSTRAINTS -from cubicweb.server import schemaserial as ss -from cubicweb.server.sqlutils import SQL_PREFIX -from cubicweb.server.pool import Operation, SingleLastOperation, PreCommitOperation -from cubicweb.server.hookhelper import entity_oldnewvalue, check_internal_entity - - -TYPE_CONVERTER = { # XXX - 'Boolean': bool, - 'Int': int, - 'Float': float, - 'Password': str, - 'String': unicode, - 'Date' : unicode, - 'Datetime' : unicode, - 'Time' : unicode, - } - -# core entity and relation types which can't be removed -CORE_ETYPES = list(BASE_TYPES) + ['CWEType', 'CWRType', 'CWUser', 'CWGroup', - 'CWConstraint', 'CWAttribute', 'CWRelation'] -CORE_RTYPES = ['eid', 'creation_date', 'modification_date', 'cwuri', - 'login', 'upassword', 'name', - 'is', 'instanceof', 'owned_by', 'created_by', 'in_group', - 'relation_type', 'from_entity', 'to_entity', - 'constrainted_by', - 'read_permission', 'add_permission', - 'delete_permission', 'updated_permission', - ] - -def get_constraints(session, entity): - constraints = [] - for cstreid in session.transaction_data.get(entity.eid, ()): - cstrent = session.entity_from_eid(cstreid) - cstr = CONSTRAINTS[cstrent.type].deserialize(cstrent.value) - cstr.eid = cstreid - constraints.append(cstr) - return constraints - -def add_inline_relation_column(session, etype, rtype): - """add necessary column and index for an inlined relation""" - table = SQL_PREFIX + etype - column = SQL_PREFIX + rtype - try: - session.system_sql(str('ALTER TABLE %s ADD COLUMN %s integer' - % (table, column)), 
rollback_on_failure=False) - session.info('added column %s to table %s', column, table) - except: - # silent exception here, if this error has not been raised because the - # column already exists, index creation will fail anyway - session.exception('error while adding column %s to table %s', - table, column) - # create index before alter table which may expectingly fail during test - # (sqlite) while index creation should never fail (test for index existence - # is done by the dbhelper) - session.pool.source('system').create_index(session, table, column) - session.info('added index on %s(%s)', table, column) - session.transaction_data.setdefault('createdattrs', []).append( - '%s.%s' % (etype, rtype)) - - -# operations for low-level database alteration ################################ - -class DropTable(PreCommitOperation): - """actually remove a database from the instance's schema""" - table = None # make pylint happy - def precommit_event(self): - dropped = self.session.transaction_data.setdefault('droppedtables', - set()) - if self.table in dropped: - return # already processed - dropped.add(self.table) - self.session.system_sql('DROP TABLE %s' % self.table) - self.info('dropped table %s', self.table) - - -class DropRelationTable(DropTable): - def __init__(self, session, rtype): - super(DropRelationTable, self).__init__( - session, table='%s_relation' % rtype) - session.transaction_data.setdefault('pendingrtypes', set()).add(rtype) - - -class DropColumn(PreCommitOperation): - """actually remove the attribut's column from entity table in the system - database - """ - table = column = None # make pylint happy - def precommit_event(self): - session, table, column = self.session, self.table, self.column - # drop index if any - session.pool.source('system').drop_index(session, table, column) - try: - session.system_sql('ALTER TABLE %s DROP COLUMN %s' - % (table, column), rollback_on_failure=False) - self.info('dropped column %s from table %s', column, table) - except Exception, ex: - # not supported by sqlite for instance - self.error('error while altering table %s: %s', table, ex) - - -# base operations for in-memory schema synchronization ######################## - -class MemSchemaNotifyChanges(SingleLastOperation): - """the update schema operation: - - special operation which should be called once and after all other schema - operations. It will trigger internal structures rebuilding to consider - schema changes - """ - - def __init__(self, session): - self.repo = session.repo - SingleLastOperation.__init__(self, session) - - def precommit_event(self): - for eschema in self.repo.schema.entities(): - if not eschema.final: - clear_cache(eschema, 'ordered_relations') - - def commit_event(self): - rebuildinfered = self.session.data.get('rebuild-infered', True) - self.repo.set_schema(self.repo.schema, rebuildinfered=rebuildinfered) - # CWUser class might have changed, update current session users - cwuser_cls = self.session.vreg['etypes'].etype_class('CWUser') - for session in self.repo._sessions.values(): - session.user.__class__ = cwuser_cls - - def rollback_event(self): - self.precommit_event() - - -class MemSchemaOperation(Operation): - """base class for schema operations""" - def __init__(self, session, kobj=None, **kwargs): - self.schema = session.schema - self.kobj = kobj - # once Operation.__init__ has been called, event may be triggered, so - # do this last ! 
- Operation.__init__(self, session, **kwargs) - # every schema operation is triggering a schema update - MemSchemaNotifyChanges(session) - - def prepare_constraints(self, subjtype, rtype, objtype): - constraints = rtype.rproperty(subjtype, objtype, 'constraints') - self.constraints = list(constraints) - rtype.set_rproperty(subjtype, objtype, 'constraints', self.constraints) - - -class MemSchemaEarlyOperation(MemSchemaOperation): - def insert_index(self): - """schema operation which are inserted at the begining of the queue - (typically to add/remove entity or relation types) - """ - i = -1 - for i, op in enumerate(self.session.pending_operations): - if not isinstance(op, MemSchemaEarlyOperation): - return i - return i + 1 - - -class MemSchemaPermissionOperation(MemSchemaOperation): - """base class to synchronize schema permission definitions""" - def __init__(self, session, perm, etype_eid): - self.perm = perm - try: - self.name = session.entity_from_eid(etype_eid).name - except IndexError: - self.error('changing permission of a no more existant type #%s', - etype_eid) - else: - Operation.__init__(self, session) - - -# operations for high-level source database alteration ######################## - -class SourceDbCWETypeRename(PreCommitOperation): - """this operation updates physical storage accordingly""" - oldname = newname = None # make pylint happy - - def precommit_event(self): - # we need sql to operate physical changes on the system database - sqlexec = self.session.system_sql - sqlexec('ALTER TABLE %s%s RENAME TO %s%s' % (SQL_PREFIX, self.oldname, - SQL_PREFIX, self.newname)) - self.info('renamed table %s to %s', self.oldname, self.newname) - sqlexec('UPDATE entities SET type=%s WHERE type=%s', - (self.newname, self.oldname)) - sqlexec('UPDATE deleted_entities SET type=%s WHERE type=%s', - (self.newname, self.oldname)) - - -class SourceDbCWRTypeUpdate(PreCommitOperation): - """actually update some properties of a relation definition""" - rschema = values = entity = None # make pylint happy - - def precommit_event(self): - session = self.session - rschema = self.rschema - if rschema.final or not 'inlined' in self.values: - return # nothing to do - inlined = self.values['inlined'] - entity = self.entity - # check in-lining is necessary / possible - if not entity.inlined_changed(inlined): - return # nothing to do - # inlined changed, make necessary physical changes! - sqlexec = self.session.system_sql - rtype = rschema.type - eidcolumn = SQL_PREFIX + 'eid' - if not inlined: - # need to create the relation if it has not been already done by - # another event of the same transaction - if not rschema.type in session.transaction_data.get('createdtables', ()): - tablesql = rschema2sql(rschema) - # create the necessary table - for sql in tablesql.split(';'): - if sql.strip(): - sqlexec(sql) - session.transaction_data.setdefault('createdtables', []).append( - rschema.type) - # copy existant data - column = SQL_PREFIX + rtype - for etype in rschema.subjects(): - table = SQL_PREFIX + str(etype) - sqlexec('INSERT INTO %s_relation SELECT %s, %s FROM %s WHERE NOT %s IS NULL' - % (rtype, eidcolumn, column, table, column)) - # drop existant columns - for etype in rschema.subjects(): - DropColumn(session, table=SQL_PREFIX + str(etype), - column=SQL_PREFIX + rtype) - else: - for etype in rschema.subjects(): - try: - add_inline_relation_column(session, str(etype), rtype) - except Exception, ex: - # the column probably already exists. 
this occurs when the - # entity's type has just been added or if the column has not - # been previously dropped - self.error('error while altering table %s: %s', etype, ex) - # copy existant data. - # XXX don't use, it's not supported by sqlite (at least at when i tried it) - #sqlexec('UPDATE %(etype)s SET %(rtype)s=eid_to ' - # 'FROM %(rtype)s_relation ' - # 'WHERE %(etype)s.eid=%(rtype)s_relation.eid_from' - # % locals()) - table = SQL_PREFIX + str(etype) - cursor = sqlexec('SELECT eid_from, eid_to FROM %(table)s, ' - '%(rtype)s_relation WHERE %(table)s.%(eidcolumn)s=' - '%(rtype)s_relation.eid_from' % locals()) - args = [{'val': eid_to, 'x': eid} for eid, eid_to in cursor.fetchall()] - if args: - column = SQL_PREFIX + rtype - cursor.executemany('UPDATE %s SET %s=%%(val)s WHERE %s=%%(x)s' - % (table, column, eidcolumn), args) - # drop existant table - DropRelationTable(session, rtype) - - -class SourceDbCWAttributeAdd(PreCommitOperation): - """an attribute relation (CWAttribute) has been added: - * add the necessary column - * set default on this column if any and possible - * register an operation to add the relation definition to the - instance's schema on commit - - constraints are handled by specific hooks - """ - entity = None # make pylint happy - - def init_rdef(self, **kwargs): - entity = self.entity - fromentity = entity.stype - self.session.execute('SET X ordernum Y+1 ' - 'WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, ' - 'X ordernum >= %(order)s, NOT X eid %(x)s', - {'x': entity.eid, 'se': fromentity.eid, - 'order': entity.ordernum or 0}) - subj = str(fromentity.name) - rtype = entity.rtype.name - obj = str(entity.otype.name) - constraints = get_constraints(self.session, entity) - rdef = RelationDefinition(subj, rtype, obj, - description=entity.description, - cardinality=entity.cardinality, - constraints=constraints, - order=entity.ordernum, - eid=entity.eid, - **kwargs) - MemSchemaRDefAdd(self.session, rdef) - return rdef - - def precommit_event(self): - session = self.session - entity = self.entity - # entity.defaultval is a string or None, but we need a correctly typed - # value - default = entity.defaultval - if default is not None: - default = TYPE_CONVERTER[entity.otype.name](default) - props = {'default': default, - 'indexed': entity.indexed, - 'fulltextindexed': entity.fulltextindexed, - 'internationalizable': entity.internationalizable} - rdef = self.init_rdef(**props) - sysource = session.pool.source('system') - attrtype = type_from_constraints(sysource.dbhelper, rdef.object, - rdef.constraints) - # XXX should be moved somehow into lgc.adbh: sqlite doesn't support to - # add a new column with UNIQUE, it should be added after the ALTER TABLE - # using ADD INDEX - if sysource.dbdriver == 'sqlite' and 'UNIQUE' in attrtype: - extra_unique_index = True - attrtype = attrtype.replace(' UNIQUE', '') - else: - extra_unique_index = False - # added some str() wrapping query since some backend (eg psycopg) don't - # allow unicode queries - table = SQL_PREFIX + rdef.subject - column = SQL_PREFIX + rdef.name - try: - session.system_sql(str('ALTER TABLE %s ADD COLUMN %s %s' - % (table, column, attrtype)), - rollback_on_failure=False) - self.info('added column %s to table %s', table, column) - except Exception, ex: - # the column probably already exists. 
this occurs when - # the entity's type has just been added or if the column - # has not been previously dropped - self.error('error while altering table %s: %s', table, ex) - if extra_unique_index or entity.indexed: - try: - sysource.create_index(session, table, column, - unique=extra_unique_index) - except Exception, ex: - self.error('error while creating index for %s.%s: %s', - table, column, ex) - # final relations are not infered, propagate - try: - eschema = self.schema.eschema(rdef.subject) - except KeyError: - return # entity type currently being added - # propagate attribute to children classes - rschema = self.schema.rschema(rdef.name) - # if relation type has been inserted in the same transaction, its final - # attribute is still set to False, so we've to ensure it's False - rschema.final = True - # XXX 'infered': True/False, not clear actually - props.update({'constraints': rdef.constraints, - 'description': rdef.description, - 'cardinality': rdef.cardinality, - 'constraints': rdef.constraints, - 'order': rdef.order}) - for specialization in eschema.specialized_by(False): - if rschema.has_rdef(specialization, rdef.object): - continue - for rql, args in ss.frdef2rql(rschema, str(specialization), - rdef.object, props): - session.execute(rql, args) - # set default value, using sql for performance and to avoid - # modification_date update - if default: - session.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column), - {'default': default}) - - -class SourceDbCWRelationAdd(SourceDbCWAttributeAdd): - """an actual relation has been added: - * if this is an inlined relation, add the necessary column - else if it's the first instance of this relation type, add the - necessary table and set default permissions - * register an operation to add the relation definition to the - instance's schema on commit - - constraints are handled by specific hooks - """ - entity = None # make pylint happy - - def precommit_event(self): - session = self.session - entity = self.entity - rdef = self.init_rdef(composite=entity.composite) - schema = session.schema - rtype = rdef.name - rschema = session.schema.rschema(rtype) - # this have to be done before permissions setting - if rschema.inlined: - # need to add a column if the relation is inlined and if this is the - # first occurence of "Subject relation Something" whatever Something - # and if it has not been added during other event of the same - # transaction - key = '%s.%s' % (rdef.subject, rtype) - try: - alreadythere = bool(rschema.objects(rdef.subject)) - except KeyError: - alreadythere = False - if not (alreadythere or - key in session.transaction_data.get('createdattrs', ())): - add_inline_relation_column(session, rdef.subject, rtype) - else: - # need to create the relation if no relation definition in the - # schema and if it has not been added during other event of the same - # transaction - if not (rschema.subjects() or - rtype in session.transaction_data.get('createdtables', ())): - try: - rschema = session.schema.rschema(rtype) - tablesql = rschema2sql(rschema) - except KeyError: - # fake we add it to the schema now to get a correctly - # initialized schema but remove it before doing anything - # more dangerous... 
- rschema = session.schema.add_relation_type(rdef) - tablesql = rschema2sql(rschema) - session.schema.del_relation_type(rtype) - # create the necessary table - for sql in tablesql.split(';'): - if sql.strip(): - session.system_sql(sql) - session.transaction_data.setdefault('createdtables', []).append( - rtype) - - -class SourceDbRDefUpdate(PreCommitOperation): - """actually update some properties of a relation definition""" - rschema = values = None # make pylint happy - - def precommit_event(self): - etype = self.kobj[0] - table = SQL_PREFIX + etype - column = SQL_PREFIX + self.rschema.type - if 'indexed' in self.values: - sysource = self.session.pool.source('system') - if self.values['indexed']: - sysource.create_index(self.session, table, column) - else: - sysource.drop_index(self.session, table, column) - if 'cardinality' in self.values and self.rschema.final: - adbh = self.session.pool.source('system').dbhelper - if not adbh.alter_column_support: - # not supported (and NOT NULL not set by yams in that case, so - # no worry) - return - atype = self.rschema.objects(etype)[0] - constraints = self.rschema.rproperty(etype, atype, 'constraints') - coltype = type_from_constraints(adbh, atype, constraints, - creating=False) - # XXX check self.values['cardinality'][0] actually changed? - sql = adbh.sql_set_null_allowed(table, column, coltype, - self.values['cardinality'][0] != '1') - self.session.system_sql(sql) - - -class SourceDbCWConstraintAdd(PreCommitOperation): - """actually update constraint of a relation definition""" - entity = None # make pylint happy - cancelled = False - - def precommit_event(self): - rdef = self.entity.reverse_constrained_by[0] - session = self.session - # when the relation is added in the same transaction, the constraint - # object is created by the operation adding the attribute or relation, - # so there is nothing to do here - if rdef.eid in session.transaction_data.get('neweids', ()): - return - subjtype, rtype, objtype = session.schema.schema_by_eid(rdef.eid) - cstrtype = self.entity.type - oldcstr = rtype.constraint_by_type(subjtype, objtype, cstrtype) - newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) - table = SQL_PREFIX + str(subjtype) - column = SQL_PREFIX + str(rtype) - # alter the physical schema on size constraint changes - if newcstr.type() == 'SizeConstraint' and ( - oldcstr is None or oldcstr.max != newcstr.max): - adbh = self.session.pool.source('system').dbhelper - card = rtype.rproperty(subjtype, objtype, 'cardinality') - coltype = type_from_constraints(adbh, objtype, [newcstr], - creating=False) - sql = adbh.sql_change_col_type(table, column, coltype, card != '1') - try: - session.system_sql(sql, rollback_on_failure=False) - self.info('altered column %s of table %s: now VARCHAR(%s)', - column, table, newcstr.max) - except Exception, ex: - # not supported by sqlite for instance - self.error('error while altering table %s: %s', table, ex) - elif cstrtype == 'UniqueConstraint' and oldcstr is None: - session.pool.source('system').create_index( - self.session, table, column, unique=True) - - -class SourceDbCWConstraintDel(PreCommitOperation): - """actually remove a constraint of a relation definition""" - rtype = subjtype = objtype = None # make pylint happy - - def precommit_event(self): - cstrtype = self.cstr.type() - table = SQL_PREFIX + str(self.subjtype) - column = SQL_PREFIX + str(self.rtype) - # alter the physical schema on size/unique constraint changes - if cstrtype == 'SizeConstraint': - try: - self.session.system_sql('ALTER 
TABLE %s ALTER COLUMN %s TYPE TEXT' - % (table, column), - rollback_on_failure=False) - self.info('altered column %s of table %s: now TEXT', - column, table) - except Exception, ex: - # not supported by sqlite for instance - self.error('error while altering table %s: %s', table, ex) - elif cstrtype == 'UniqueConstraint': - self.session.pool.source('system').drop_index( - self.session, table, column, unique=True) - - -# operations for in-memory schema synchronization ############################# - -class MemSchemaCWETypeAdd(MemSchemaEarlyOperation): - """actually add the entity type to the instance's schema""" - eid = None # make pylint happy - def commit_event(self): - self.schema.add_entity_type(self.kobj) - - -class MemSchemaCWETypeRename(MemSchemaOperation): - """this operation updates physical storage accordingly""" - oldname = newname = None # make pylint happy - - def commit_event(self): - self.session.schema.rename_entity_type(self.oldname, self.newname) - - -class MemSchemaCWETypeDel(MemSchemaOperation): - """actually remove the entity type from the instance's schema""" - def commit_event(self): - try: - # del_entity_type also removes entity's relations - self.schema.del_entity_type(self.kobj) - except KeyError: - # s/o entity type have already been deleted - pass - - -class MemSchemaCWRTypeAdd(MemSchemaEarlyOperation): - """actually add the relation type to the instance's schema""" - eid = None # make pylint happy - def commit_event(self): - rschema = self.schema.add_relation_type(self.kobj) - rschema.set_default_groups() - - -class MemSchemaCWRTypeUpdate(MemSchemaOperation): - """actually update some properties of a relation definition""" - rschema = values = None # make pylint happy - - def commit_event(self): - # structure should be clean, not need to remove entity's relations - # at this point - self.rschema.__dict__.update(self.values) - - -class MemSchemaCWRTypeDel(MemSchemaOperation): - """actually remove the relation type from the instance's schema""" - def commit_event(self): - try: - self.schema.del_relation_type(self.kobj) - except KeyError: - # s/o entity type have already been deleted - pass - - -class MemSchemaRDefAdd(MemSchemaEarlyOperation): - """actually add the attribute relation definition to the instance's - schema - """ - def commit_event(self): - self.schema.add_relation_def(self.kobj) - - -class MemSchemaRDefUpdate(MemSchemaOperation): - """actually update some properties of a relation definition""" - rschema = values = None # make pylint happy - - def commit_event(self): - # structure should be clean, not need to remove entity's relations - # at this point - self.rschema._rproperties[self.kobj].update(self.values) - - -class MemSchemaRDefDel(MemSchemaOperation): - """actually remove the relation definition from the instance's schema""" - def commit_event(self): - subjtype, rtype, objtype = self.kobj - try: - self.schema.del_relation_def(subjtype, rtype, objtype) - except KeyError: - # relation type may have been already deleted - pass - - -class MemSchemaCWConstraintAdd(MemSchemaOperation): - """actually update constraint of a relation definition - - has to be called before SourceDbCWConstraintAdd - """ - cancelled = False - - def precommit_event(self): - rdef = self.entity.reverse_constrained_by[0] - # when the relation is added in the same transaction, the constraint - # object is created by the operation adding the attribute or relation, - # so there is nothing to do here - if rdef.eid in self.session.transaction_data.get('neweids', ()): - 
self.cancelled = True - return - subjtype, rtype, objtype = self.session.schema.schema_by_eid(rdef.eid) - self.prepare_constraints(subjtype, rtype, objtype) - cstrtype = self.entity.type - self.cstr = rtype.constraint_by_type(subjtype, objtype, cstrtype) - self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) - self.newcstr.eid = self.entity.eid - - def commit_event(self): - if self.cancelled: - return - # in-place modification - if not self.cstr is None: - self.constraints.remove(self.cstr) - self.constraints.append(self.newcstr) - - -class MemSchemaCWConstraintDel(MemSchemaOperation): - """actually remove a constraint of a relation definition - - has to be called before SourceDbCWConstraintDel - """ - rtype = subjtype = objtype = None # make pylint happy - def precommit_event(self): - self.prepare_constraints(self.subjtype, self.rtype, self.objtype) - - def commit_event(self): - self.constraints.remove(self.cstr) - - -class MemSchemaPermissionCWGroupAdd(MemSchemaPermissionOperation): - """synchronize schema when a *_permission relation has been added on a group - """ - def __init__(self, session, perm, etype_eid, group_eid): - self.group = session.entity_from_eid(group_eid).name - super(MemSchemaPermissionCWGroupAdd, self).__init__( - session, perm, etype_eid) - - def commit_event(self): - """the observed connections pool has been commited""" - try: - erschema = self.schema[self.name] - except KeyError: - # duh, schema not found, log error and skip operation - self.error('no schema for %s', self.name) - return - groups = list(erschema.get_groups(self.perm)) - try: - groups.index(self.group) - self.warning('group %s already have permission %s on %s', - self.group, self.perm, erschema.type) - except ValueError: - groups.append(self.group) - erschema.set_groups(self.perm, groups) - - -class MemSchemaPermissionCWGroupDel(MemSchemaPermissionCWGroupAdd): - """synchronize schema when a *_permission relation has been deleted from a - group - """ - - def commit_event(self): - """the observed connections pool has been commited""" - try: - erschema = self.schema[self.name] - except KeyError: - # duh, schema not found, log error and skip operation - self.error('no schema for %s', self.name) - return - groups = list(erschema.get_groups(self.perm)) - try: - groups.remove(self.group) - erschema.set_groups(self.perm, groups) - except ValueError: - self.error('can\'t remove permission %s on %s to group %s', - self.perm, erschema.type, self.group) - - -class MemSchemaPermissionRQLExpressionAdd(MemSchemaPermissionOperation): - """synchronize schema when a *_permission relation has been added on a rql - expression - """ - def __init__(self, session, perm, etype_eid, expression): - self.expr = expression - super(MemSchemaPermissionRQLExpressionAdd, self).__init__( - session, perm, etype_eid) - - def commit_event(self): - """the observed connections pool has been commited""" - try: - erschema = self.schema[self.name] - except KeyError: - # duh, schema not found, log error and skip operation - self.error('no schema for %s', self.name) - return - exprs = list(erschema.get_rqlexprs(self.perm)) - exprs.append(erschema.rql_expression(self.expr)) - erschema.set_rqlexprs(self.perm, exprs) - - -class MemSchemaPermissionRQLExpressionDel(MemSchemaPermissionRQLExpressionAdd): - """synchronize schema when a *_permission relation has been deleted from an - rql expression - """ - - def commit_event(self): - """the observed connections pool has been commited""" - try: - erschema = self.schema[self.name] - 
except KeyError: - # duh, schema not found, log error and skip operation - self.error('no schema for %s', self.name) - return - rqlexprs = list(erschema.get_rqlexprs(self.perm)) - for i, rqlexpr in enumerate(rqlexprs): - if rqlexpr.expression == self.expr: - rqlexprs.pop(i) - break - else: - self.error('can\'t remove permission %s on %s for expression %s', - self.perm, erschema.type, self.expr) - return - erschema.set_rqlexprs(self.perm, rqlexprs) - - -class MemSchemaSpecializesAdd(MemSchemaOperation): - - def commit_event(self): - eschema = self.session.schema.schema_by_eid(self.etypeeid) - parenteschema = self.session.schema.schema_by_eid(self.parentetypeeid) - eschema._specialized_type = parenteschema.type - parenteschema._specialized_by.append(eschema.type) - - -class MemSchemaSpecializesDel(MemSchemaOperation): - - def commit_event(self): - try: - eschema = self.session.schema.schema_by_eid(self.etypeeid) - parenteschema = self.session.schema.schema_by_eid(self.parentetypeeid) - except KeyError: - # etype removed, nothing to do - return - eschema._specialized_type = None - parenteschema._specialized_by.remove(eschema.type) - - -# deletion hooks ############################################################### - -def before_del_eetype(session, eid): - """before deleting a CWEType entity: - * check that we don't remove a core entity type - * cascade to delete related CWAttribute and CWRelation entities - * instantiate an operation to delete the entity type on commit - """ - # final entities can't be deleted, don't care about that - name = check_internal_entity(session, eid, CORE_ETYPES) - # delete every entities of this type - session.unsafe_execute('DELETE %s X' % name) - DropTable(session, table=SQL_PREFIX + name) - MemSchemaCWETypeDel(session, name) - - -def after_del_eetype(session, eid): - # workflow cleanup - session.execute('DELETE Workflow X WHERE NOT X workflow_of Y') - - -def before_del_ertype(session, eid): - """before deleting a CWRType entity: - * check that we don't remove a core relation type - * cascade to delete related CWAttribute and CWRelation entities - * instantiate an operation to delete the relation type on commit - """ - name = check_internal_entity(session, eid, CORE_RTYPES) - # delete relation definitions using this relation type - session.execute('DELETE CWAttribute X WHERE X relation_type Y, Y eid %(x)s', - {'x': eid}) - session.execute('DELETE CWRelation X WHERE X relation_type Y, Y eid %(x)s', - {'x': eid}) - MemSchemaCWRTypeDel(session, name) - - -def after_del_relation_type(session, rdefeid, rtype, rteid): - """before deleting a CWAttribute or CWRelation entity: - * if this is a final or inlined relation definition, instantiate an - operation to drop necessary column, else if this is the last instance - of a non final relation, instantiate an operation to drop necessary - table - * instantiate an operation to delete the relation definition on commit - * delete the associated relation type when necessary - """ - subjschema, rschema, objschema = session.schema.schema_by_eid(rdefeid) - pendings = session.transaction_data.get('pendingeids', ()) - pendingrdefs = session.transaction_data.setdefault('pendingrdefs', set()) - # first delete existing relation if necessary - if rschema.final: - rdeftype = 'CWAttribute' - pendingrdefs.add((subjschema, rschema)) - else: - rdeftype = 'CWRelation' - pendingrdefs.add((subjschema, rschema, objschema)) - if not (subjschema.eid in pendings or objschema.eid in pendings): - session.execute('DELETE X %s Y WHERE X is %s, Y is 
%s' - % (rschema, subjschema, objschema)) - execute = session.unsafe_execute - rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,' - 'R eid %%(x)s' % rdeftype, {'x': rteid}) - lastrel = rset[0][0] == 0 - # we have to update physical schema systematically for final and inlined - # relations, but only if it's the last instance for this relation type - # for other relations - - if (rschema.final or rschema.inlined): - rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, ' - 'R eid %%(x)s, X from_entity E, E name %%(name)s' - % rdeftype, {'x': rteid, 'name': str(subjschema)}) - if rset[0][0] == 0 and not subjschema.eid in pendings: - ptypes = session.transaction_data.setdefault('pendingrtypes', set()) - ptypes.add(rschema.type) - DropColumn(session, table=SQL_PREFIX + subjschema.type, - column=SQL_PREFIX + rschema.type) - elif lastrel: - DropRelationTable(session, rschema.type) - # if this is the last instance, drop associated relation type - if lastrel and not rteid in pendings: - execute('DELETE CWRType X WHERE X eid %(x)s', {'x': rteid}, 'x') - MemSchemaRDefDel(session, (subjschema, rschema, objschema)) - - -# addition hooks ############################################################### - -def before_add_eetype(session, entity): - """before adding a CWEType entity: - * check that we are not using an existing entity type, - """ - name = entity['name'] - schema = session.schema - if name in schema and schema[name].eid is not None: - raise RepositoryError('an entity type %s already exists' % name) - -def after_add_eetype(session, entity): - """after adding a CWEType entity: - * create the necessary table - * set creation_date and modification_date by creating the necessary - CWAttribute entities - * add owned_by relation by creating the necessary CWRelation entity - * register an operation to add the entity type to the instance's - schema on commit - """ - if entity.get('final'): - return - schema = session.schema - name = entity['name'] - etype = EntityType(name=name, description=entity.get('description'), - meta=entity.get('meta')) # don't care about final - # fake we add it to the schema now to get a correctly initialized schema - # but remove it before doing anything more dangerous... - schema = session.schema - eschema = schema.add_entity_type(etype) - eschema.set_default_groups() - # generate table sql and rql to add metadata - tablesql = eschema2sql(session.pool.source('system').dbhelper, eschema, - prefix=SQL_PREFIX) - relrqls = [] - for rtype in (META_RTYPES - VIRTUAL_RTYPES): - rschema = schema[rtype] - sampletype = rschema.subjects()[0] - desttype = rschema.objects()[0] - props = rschema.rproperties(sampletype, desttype) - relrqls += list(ss.rdef2rql(rschema, name, desttype, props)) - # now remove it ! 
- schema.del_entity_type(name) - # create the necessary table - for sql in tablesql.split(';'): - if sql.strip(): - session.system_sql(sql) - # register operation to modify the schema on commit - # this have to be done before adding other relations definitions - # or permission settings - etype.eid = entity.eid - MemSchemaCWETypeAdd(session, etype) - # add meta relations - for rql, kwargs in relrqls: - session.execute(rql, kwargs) - - -def before_add_ertype(session, entity): - """before adding a CWRType entity: - * check that we are not using an existing relation type, - * register an operation to add the relation type to the instance's - schema on commit - - We don't know yeat this point if a table is necessary - """ - name = entity['name'] - if name in session.schema.relations(): - raise RepositoryError('a relation type %s already exists' % name) - - -def after_add_ertype(session, entity): - """after a CWRType entity has been added: - * register an operation to add the relation type to the instance's - schema on commit - We don't know yeat this point if a table is necessary - """ - rtype = RelationType(name=entity['name'], - description=entity.get('description'), - meta=entity.get('meta', False), - inlined=entity.get('inlined', False), - symetric=entity.get('symetric', False)) - rtype.eid = entity.eid - MemSchemaCWRTypeAdd(session, rtype) - - -def after_add_efrdef(session, entity): - SourceDbCWAttributeAdd(session, entity=entity) - -def after_add_enfrdef(session, entity): - SourceDbCWRelationAdd(session, entity=entity) - - -# update hooks ################################################################# - -def check_valid_changes(session, entity, ro_attrs=('name', 'final')): - errors = {} - # don't use getattr(entity, attr), we would get the modified value if any - for attr in ro_attrs: - if attr in entity.edited_attributes: - origval, newval = entity_oldnewvalue(entity, attr) - if newval != origval: - errors[attr] = session._("can't change the %s attribute") % \ - display_name(session, attr) - if errors: - raise ValidationError(entity.eid, errors) - -def before_update_eetype(session, entity): - """check name change, handle final""" - check_valid_changes(session, entity, ro_attrs=('final',)) - # don't use getattr(entity, attr), we would get the modified value if any - if 'name' in entity.edited_attributes: - oldname, newname = entity_oldnewvalue(entity, 'name') - if newname.lower() != oldname.lower(): - SourceDbCWETypeRename(session, oldname=oldname, newname=newname) - MemSchemaCWETypeRename(session, oldname=oldname, newname=newname) - -def before_update_ertype(session, entity): - """check name change, handle final""" - check_valid_changes(session, entity) - - -def after_update_erdef(session, entity): - if entity.eid in session.transaction_data.get('pendingeids', ()): - return - desttype = entity.otype.name - rschema = session.schema[entity.rtype.name] - newvalues = {} - for prop in rschema.rproperty_defs(desttype): - if prop == 'constraints': - continue - if prop == 'order': - prop = 'ordernum' - if prop in entity.edited_attributes: - newvalues[prop] = entity[prop] - if newvalues: - subjtype = entity.stype.name - MemSchemaRDefUpdate(session, kobj=(subjtype, desttype), - rschema=rschema, values=newvalues) - SourceDbRDefUpdate(session, kobj=(subjtype, desttype), - rschema=rschema, values=newvalues) - -def after_update_ertype(session, entity): - rschema = session.schema.rschema(entity.name) - newvalues = {} - for prop in ('meta', 'symetric', 'inlined'): - if prop in entity: - 
newvalues[prop] = entity[prop] - if newvalues: - MemSchemaCWRTypeUpdate(session, rschema=rschema, values=newvalues) - SourceDbCWRTypeUpdate(session, rschema=rschema, values=newvalues, - entity=entity) - -# constraints synchronization hooks ############################################ - -def after_add_econstraint(session, entity): - MemSchemaCWConstraintAdd(session, entity=entity) - SourceDbCWConstraintAdd(session, entity=entity) - - -def after_update_econstraint(session, entity): - MemSchemaCWConstraintAdd(session, entity=entity) - SourceDbCWConstraintAdd(session, entity=entity) - - -def before_delete_constrained_by(session, fromeid, rtype, toeid): - if not fromeid in session.transaction_data.get('pendingeids', ()): - schema = session.schema - entity = session.entity_from_eid(toeid) - subjtype, rtype, objtype = schema.schema_by_eid(fromeid) - try: - cstr = rtype.constraint_by_type(subjtype, objtype, - entity.cstrtype[0].name) - except IndexError: - session.critical('constraint type no more accessible') - else: - SourceDbCWConstraintDel(session, subjtype=subjtype, rtype=rtype, - objtype=objtype, cstr=cstr) - MemSchemaCWConstraintDel(session, subjtype=subjtype, rtype=rtype, - objtype=objtype, cstr=cstr) - - -def after_add_constrained_by(session, fromeid, rtype, toeid): - if fromeid in session.transaction_data.get('neweids', ()): - session.transaction_data.setdefault(fromeid, []).append(toeid) - - -# permissions synchronization hooks ############################################ - -def after_add_permission(session, subject, rtype, object): - """added entity/relation *_permission, need to update schema""" - perm = rtype.split('_', 1)[0] - if session.describe(object)[0] == 'CWGroup': - MemSchemaPermissionCWGroupAdd(session, perm, subject, object) - else: # RQLExpression - expr = session.execute('Any EXPR WHERE X eid %(x)s, X expression EXPR', - {'x': object}, 'x')[0][0] - MemSchemaPermissionRQLExpressionAdd(session, perm, subject, expr) - - -def before_del_permission(session, subject, rtype, object): - """delete entity/relation *_permission, need to update schema - - skip the operation if the related type is being deleted - """ - if subject in session.transaction_data.get('pendingeids', ()): - return - perm = rtype.split('_', 1)[0] - if session.describe(object)[0] == 'CWGroup': - MemSchemaPermissionCWGroupDel(session, perm, subject, object) - else: # RQLExpression - expr = session.execute('Any EXPR WHERE X eid %(x)s, X expression EXPR', - {'x': object}, 'x')[0][0] - MemSchemaPermissionRQLExpressionDel(session, perm, subject, expr) - - -def after_add_specializes(session, subject, rtype, object): - MemSchemaSpecializesAdd(session, etypeeid=subject, parentetypeeid=object) - -def after_del_specializes(session, subject, rtype, object): - MemSchemaSpecializesDel(session, etypeeid=subject, parentetypeeid=object) - - -def _register_schema_hooks(hm): - """register schema related hooks on the hooks manager""" - # schema synchronisation ##################### - # before/after add - hm.register_hook(before_add_eetype, 'before_add_entity', 'CWEType') - hm.register_hook(before_add_ertype, 'before_add_entity', 'CWRType') - hm.register_hook(after_add_eetype, 'after_add_entity', 'CWEType') - hm.register_hook(after_add_ertype, 'after_add_entity', 'CWRType') - hm.register_hook(after_add_efrdef, 'after_add_entity', 'CWAttribute') - hm.register_hook(after_add_enfrdef, 'after_add_entity', 'CWRelation') - # before/after update - hm.register_hook(before_update_eetype, 'before_update_entity', 'CWEType') - 
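All of these register_hook calls follow the pre-3.6 contract that this changeset is in the process of retiring: a plain module-level function per event, registered for a given entity or relation type (an empty string meaning any type). For contrast, a small hypothetical example in both styles; MyType, the registry id and the hook bodies are made up, and the new style is the one used by the server/test/data/hooks.py hunk further down:

    # old contract, as used by _register_schema_hooks above
    def after_add_mytype(session, entity):
        session.info('MyType %s created', entity.eid)

    def _register_mytype_hooks(hm):
        hm.register_hook(after_add_mytype, 'after_add_entity', 'MyType')

    # new contract introduced by this changeset: a Hook subclass picked up by the
    # registry machinery instead of an explicit registration call
    from cubicweb.selectors import implements
    from cubicweb.server.hook import Hook

    class AfterAddMyType(Hook):
        __regid__ = 'myafteradd'
        __select__ = Hook.__select__ & implements('MyType')
        events = ('after_add_entity',)
        def __call__(self):
            self._cw.info('MyType %s created', self.entity.eid)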
hm.register_hook(before_update_ertype, 'before_update_entity', 'CWRType') - hm.register_hook(after_update_ertype, 'after_update_entity', 'CWRType') - hm.register_hook(after_update_erdef, 'after_update_entity', 'CWAttribute') - hm.register_hook(after_update_erdef, 'after_update_entity', 'CWRelation') - # before/after delete - hm.register_hook(before_del_eetype, 'before_delete_entity', 'CWEType') - hm.register_hook(after_del_eetype, 'after_delete_entity', 'CWEType') - hm.register_hook(before_del_ertype, 'before_delete_entity', 'CWRType') - hm.register_hook(after_del_relation_type, 'after_delete_relation', 'relation_type') - hm.register_hook(after_add_specializes, 'after_add_relation', 'specializes') - hm.register_hook(after_del_specializes, 'after_delete_relation', 'specializes') - # constraints synchronization hooks - hm.register_hook(after_add_econstraint, 'after_add_entity', 'CWConstraint') - hm.register_hook(after_update_econstraint, 'after_update_entity', 'CWConstraint') - hm.register_hook(before_delete_constrained_by, 'before_delete_relation', 'constrained_by') - hm.register_hook(after_add_constrained_by, 'after_add_relation', 'constrained_by') - # permissions synchronisation ################ - for perm in ('read_permission', 'add_permission', - 'delete_permission', 'update_permission'): - hm.register_hook(after_add_permission, 'after_add_relation', perm) - hm.register_hook(before_del_permission, 'before_delete_relation', perm) diff -r 15d541321a8c -r 74c1597f8a82 server/schemaserial.py --- a/server/schemaserial.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/schemaserial.py Wed Jan 20 10:13:45 2010 +0100 @@ -9,6 +9,7 @@ import os import sys +import os from itertools import chain from logilab.common.shellutils import ProgressBar @@ -182,7 +183,6 @@ fulltext_container=ft_container, eid=eid) rschema = schema.add_relation_type(rtype) index[eid] = rschema - set_perms(rschema, permsdict.get(eid, {})) cstrsdict = deserialize_rdef_constraints(session) for values in session.execute( 'Any X,SE,RT,OE,CARD,ORD,DESC,IDX,FTIDX,I18N,DFLT WHERE X is CWAttribute,' @@ -201,7 +201,10 @@ indexed=idx, fulltextindexed=ftidx, internationalizable=i18n, default=default, eid=rdefeid) - schema.add_relation_def(rdef) + rdefs = schema.add_relation_def(rdef) + # rdefs can be None on duplicated relation definitions (e.g. symetrics) + if rdefs: + set_perms(rdefs, permsdict.get(rdefeid, {})) for values in session.execute( 'Any X,SE,RT,OE,CARD,ORD,DESC,C WHERE X is CWRelation, X relation_type RT,' 'X cardinality CARD, X ordernum ORD, X description DESC, ' @@ -215,7 +218,10 @@ order=ord, description=desc, composite=c, constraints=constraints, eid=rdefeid) - schema.add_relation_def(rdef) + rdefs = schema.add_relation_def(rdef) + # rdefs can be None on duplicated relation definitions (e.g. 
symetrics) + if rdefs: + set_perms(rdefs, permsdict.get(rdefeid, {})) schema.infer_specialization_rules() if _3_2_migration: _update_database(schema, sqlcu) @@ -256,7 +262,7 @@ actperms.append(erschema.rql_expression(*something)) else: # group name actperms.append(something) - erschema.set_permissions(action, actperms) + erschema.set_action_permissions(action, actperms) def deserialize_rdef_constraints(session): @@ -277,11 +283,13 @@ """synchronize schema and permissions in the database according to current schema """ - _title = '-> storing the schema in the database ' - print _title, + quiet = os.environ.get('APYCOT_ROOT') + if not quiet: + _title = '-> storing the schema in the database ' + print _title, eschemas = schema.entities() aller = eschemas + schema.relations() - if not verbose and not os.environ.get('APYCOT_ROOT'): + if not verbose and not quiet: pb_size = len(aller) + len(CONSTRAINTS) + len([x for x in eschemas if x.specializes()]) pb = ProgressBar(pb_size, title=_title) else: @@ -300,14 +308,10 @@ if pb is not None: pb.update() continue - for rql, kwargs in erschema2rql(schema[ertype]): + for rql, kwargs in erschema2rql(schema[ertype], groupmap): if verbose: print rql % kwargs cursor.execute(rql, kwargs) - for rql, kwargs in erperms2rql(schema[ertype], groupmap): - if verbose: - print rql - cursor.execute(rql, kwargs) if pb is not None: pb.update() for rql, kwargs in specialize2rql(schema): @@ -316,7 +320,8 @@ cursor.execute(rql, kwargs) if pb is not None: pb.update() - print + if not quiet: + print def _ervalues(erschema): @@ -358,8 +363,8 @@ def _rdef_values(rschema, objtype, props): amap = {'order': 'ordernum'} values = {} - for prop, default in rschema.rproperty_defs(objtype).iteritems(): - if prop in ('eid', 'constraints', 'uid', 'infered'): + for prop, default in schemamod.RelationDefinitionSchema.rproperty_defs(objtype).iteritems(): + if prop in ('eid', 'constraints', 'uid', 'infered', 'permissions'): continue value = props.get(prop, default) if prop in ('indexed', 'fulltextindexed', 'internationalizable'): @@ -390,17 +395,23 @@ return relations, values -def __rdef2rql(genmap, rschema, subjtype=None, objtype=None, props=None): +def __rdef2rql(genmap, rschema, subjtype=None, objtype=None, props=None, + groupmap=None): if subjtype is None: assert objtype is None assert props is None - targets = rschema.iter_rdefs() + targets = sorted(rschema.rdefs) else: assert not objtype is None targets = [(subjtype, objtype)] + # relation schema + if rschema.final: + etype = 'CWAttribute' + else: + etype = 'CWRelation' for subjtype, objtype in targets: if props is None: - _props = rschema.rproperties(subjtype, objtype) + _props = rschema.rdef(subjtype, objtype) else: _props = props # don't serialize infered relations @@ -409,6 +420,15 @@ gen = genmap[rschema.final] for rql, values in gen(rschema, subjtype, objtype, _props): yield rql, values + # no groupmap means "no security insertion" + if groupmap: + for rql, args in _erperms2rql(_props, groupmap): + args['st'] = str(subjtype) + args['rt'] = str(rschema) + args['ot'] = str(objtype) + yield rql + 'X is %s, X from_entity ST, X to_entity OT, '\ + 'X relation_type RT, RT name %%(rt)s, ST name %%(st)s, '\ + 'OT name %%(ot)s' % etype, args def schema2rql(schema, skip=None, allow=None): @@ -424,12 +444,12 @@ return chain(*[erschema2rql(schema[t]) for t in all if t in allow]) return chain(*[erschema2rql(schema[t]) for t in all]) -def erschema2rql(erschema): +def erschema2rql(erschema, groupmap): if isinstance(erschema, 
schemamod.EntitySchema): - return eschema2rql(erschema) - return rschema2rql(erschema) + return eschema2rql(erschema, groupmap=groupmap) + return rschema2rql(erschema, groupmap=groupmap) -def eschema2rql(eschema): +def eschema2rql(eschema, groupmap=None): """return a list of rql insert statements to enter an entity schema in the database as an CWEType entity """ @@ -437,6 +457,11 @@ # NOTE: 'specializes' relation can't be inserted here since there's no # way to make sure the parent type is inserted before the child type yield 'INSERT CWEType X: %s' % ','.join(relations) , values + # entity permissions + if groupmap is not None: + for rql, args in _erperms2rql(eschema, groupmap): + args['name'] = str(eschema) + yield rql + 'X is CWEType, X name %(name)s', args def specialize2rql(schema): for eschema in schema.entities(): @@ -449,7 +474,7 @@ values = {'x': eschema.type, 'et': specialized_type.type} yield 'SET X specializes ET WHERE X name %(x)s, ET name %(et)s', values -def rschema2rql(rschema, addrdef=True): +def rschema2rql(rschema, addrdef=True, groupmap=None): """return a list of rql insert statements to enter a relation schema in the database as an CWRType entity """ @@ -458,12 +483,12 @@ relations, values = rschema_relations_values(rschema) yield 'INSERT CWRType X: %s' % ','.join(relations), values if addrdef: - for rql, values in rdef2rql(rschema): + for rql, values in rdef2rql(rschema, groupmap=groupmap): yield rql, values -def rdef2rql(rschema, subjtype=None, objtype=None, props=None): +def rdef2rql(rschema, subjtype=None, objtype=None, props=None, groupmap=None): genmap = {True: frdef2rql, False: nfrdef2rql} - return __rdef2rql(genmap, rschema, subjtype, objtype, props) + return __rdef2rql(genmap, rschema, subjtype, objtype, props, groupmap) _LOCATE_RDEF_RQL0 = 'X relation_type ER,X from_entity SE,X to_entity OE' @@ -487,7 +512,7 @@ def rdefrelations2rql(rschema, subjtype, objtype, props): iterators = [] - for constraint in props['constraints']: + for constraint in props.constraints: iterators.append(constraint2rql(rschema, subjtype, objtype, constraint)) return chain(*iterators) @@ -499,43 +524,30 @@ CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, \ ER name %(rt)s, SE name %(se)s, OE name %(oe)s', values -def perms2rql(schema, groupmapping): - """return rql insert statements to enter the schema's permissions in - the database as [read|add|delete|update]_permission relations between - CWEType/CWRType and CWGroup entities - groupmapping is a dictionnary mapping standard group names to - eids - """ - for etype in sorted(schema.entities()): - yield erperms2rql(schema[etype], groupmapping) - for rtype in sorted(schema.relations()): - yield erperms2rql(schema[rtype], groupmapping) - -def erperms2rql(erschema, groupmapping): +def _erperms2rql(erschema, groupmap): """return rql insert statements to enter the entity or relation schema's permissions in the database as [read|add|delete|update]_permission relations between CWEType/CWRType and CWGroup entities """ - etype = isinstance(erschema, schemamod.EntitySchema) and 'CWEType' or 'CWRType' for action in erschema.ACTIONS: - for group in sorted(erschema.get_groups(action)): - try: - yield ('SET X %s_permission Y WHERE X is %s, X name %%(name)s, Y eid %%(g)s' - % (action, etype), {'name': str(erschema), - 'g': groupmapping[group]}) - except KeyError: - continue - for rqlexpr in sorted(erschema.get_rqlexprs(action)): - yield ('INSERT RQLExpression E: E expression %%(e)s, E exprtype %%(t)s, ' - 'E mainvars 
%%(v)s, X %s_permission E ' - 'WHERE X is %s, X name %%(name)s' % (action, etype), - {'e': unicode(rqlexpr.expression), - 'v': unicode(rqlexpr.mainvars), - 't': unicode(rqlexpr.__class__.__name__), - 'name': unicode(erschema) - }) + for group_or_rqlexpr in erschema.action_permissions(action): + if isinstance(group_or_rqlexpr, basestring): + # group + try: + yield ('SET X %s_permission Y WHERE Y eid %%(g)s, ' % action, + {'g': groupmap[group_or_rqlexpr]}) + except KeyError: + continue + else: + # rqlexpr + rqlexpr = group_or_rqlexpr + yield ('INSERT RQLExpression E: E expression %%(e)s, E exprtype %%(t)s, ' + 'E mainvars %%(v)s, X %s_permission E WHERE ' % action, + {'e': unicode(rqlexpr.expression), + 'v': unicode(rqlexpr.mainvars), + 't': unicode(rqlexpr.__class__.__name__)}) def updateeschema2rql(eschema): diff -r 15d541321a8c -r 74c1597f8a82 server/securityhooks.py --- a/server/securityhooks.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,106 +0,0 @@ -"""Security hooks: check permissions to add/delete/update entities according to -the user connected to a session - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -from cubicweb import Unauthorized -from cubicweb.server.pool import LateOperation -from cubicweb.server import BEFORE_ADD_RELATIONS, ON_COMMIT_ADD_RELATIONS - -def check_entity_attributes(session, entity): - eid = entity.eid - eschema = entity.e_schema - # ._default_set is only there on entity creation to indicate unspecified - # attributes which has been set to a default value defined in the schema - defaults = getattr(entity, '_default_set', ()) - try: - editedattrs = entity.edited_attributes - except AttributeError: - editedattrs = entity.keys() - for attr in editedattrs: - if attr in defaults: - continue - rschema = eschema.subjrels[attr] - if rschema.final: # non final relation are checked by other hooks - # add/delete should be equivalent (XXX: unify them into 'update' ?) 
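To make the permission serialization introduced in the schemaserial.py hunk above concrete: for an entity type whose read permission is ('managers', ERQLExpression('X owned_by U')) and a groupmap containing {'managers': 3}, the tuples yielded by _erperms2rql, once suffixed by eschema2rql, look roughly like this (the eid and the Affaire type name are illustrative):

    # group permission: link the CWEType to an existing CWGroup by eid
    ('SET X read_permission Y WHERE Y eid %(g)s, X is CWEType, X name %(name)s',
     {'g': 3, 'name': 'Affaire'})

    # RQL expression permission: create the RQLExpression entity and link it
    ('INSERT RQLExpression E: E expression %(e)s, E exprtype %(t)s, '
     'E mainvars %(v)s, X read_permission E WHERE X is CWEType, X name %(name)s',
     {'e': u'X owned_by U', 'v': u'X', 't': u'ERQLExpression', 'name': 'Affaire'})

Groups missing from groupmap are skipped (the KeyError branch), so a schema can still be serialized when some standard groups have not been created yet.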
- rschema.check_perm(session, 'add', eid) - - -class CheckEntityPermissionOp(LateOperation): - def precommit_event(self): - #print 'CheckEntityPermissionOp', self.session.user, self.entity, self.action - self.entity.check_perm(self.action) - check_entity_attributes(self.session, self.entity) - - def commit_event(self): - pass - - -class CheckRelationPermissionOp(LateOperation): - def precommit_event(self): - self.rschema.check_perm(self.session, self.action, self.fromeid, self.toeid) - - def commit_event(self): - pass - -def after_add_entity(session, entity): - if not session.is_super_session: - CheckEntityPermissionOp(session, entity=entity, action='add') - -def after_update_entity(session, entity): - if not session.is_super_session: - try: - # check user has permission right now, if not retry at commit time - entity.check_perm('update') - check_entity_attributes(session, entity) - except Unauthorized: - entity.clear_local_perm_cache('update') - CheckEntityPermissionOp(session, entity=entity, action='update') - -def before_del_entity(session, eid): - if not session.is_super_session: - eschema = session.repo.schema[session.describe(eid)[0]] - eschema.check_perm(session, 'delete', eid) - - -def before_add_relation(session, fromeid, rtype, toeid): - if rtype in BEFORE_ADD_RELATIONS and not session.is_super_session: - nocheck = session.transaction_data.get('skip-security', ()) - if (fromeid, rtype, toeid) in nocheck: - return - rschema = session.repo.schema[rtype] - rschema.check_perm(session, 'add', fromeid, toeid) - -def after_add_relation(session, fromeid, rtype, toeid): - if not rtype in BEFORE_ADD_RELATIONS and not session.is_super_session: - nocheck = session.transaction_data.get('skip-security', ()) - if (fromeid, rtype, toeid) in nocheck: - return - rschema = session.repo.schema[rtype] - if rtype in ON_COMMIT_ADD_RELATIONS: - CheckRelationPermissionOp(session, action='add', rschema=rschema, - fromeid=fromeid, toeid=toeid) - else: - rschema.check_perm(session, 'add', fromeid, toeid) - -def before_del_relation(session, fromeid, rtype, toeid): - if not session.is_super_session: - nocheck = session.transaction_data.get('skip-security', ()) - if (fromeid, rtype, toeid) in nocheck: - return - session.repo.schema[rtype].check_perm(session, 'delete', fromeid, toeid) - -def register_security_hooks(hm): - """register meta-data related hooks on the hooks manager""" - hm.register_hook(after_add_entity, 'after_add_entity', '') - hm.register_hook(after_update_entity, 'after_update_entity', '') - hm.register_hook(before_del_entity, 'before_delete_entity', '') - hm.register_hook(before_add_relation, 'before_add_relation', '') - hm.register_hook(after_add_relation, 'after_add_relation', '') - hm.register_hook(before_del_relation, 'before_delete_relation', '') - diff -r 15d541321a8c -r 74c1597f8a82 server/serverconfig.py --- a/server/serverconfig.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/serverconfig.py Wed Jan 20 10:13:45 2010 +0100 @@ -86,7 +86,7 @@ """standalone RQL server""" name = 'repository' - cubicweb_appobject_path = CubicWebConfiguration.cubicweb_appobject_path | set(['sobjects']) + cubicweb_appobject_path = CubicWebConfiguration.cubicweb_appobject_path | set(['sobjects', 'hooks']) cube_appobject_path = CubicWebConfiguration.cube_appobject_path | set(['sobjects', 'hooks']) options = merge_options(( @@ -187,14 +187,56 @@ # check user's state at login time consider_user_state = True - # hooks registration configuration + # hooks activation configuration # all hooks should be activated 
during normal execution - core_hooks = True - usergroup_hooks = True - schema_hooks = True - notification_hooks = True - security_hooks = True - instance_hooks = True + disabled_hooks_categories = set() + enabled_hooks_categories = set() + ALLOW_ALL = object() + DENY_ALL = object() + hooks_mode = ALLOW_ALL + + @classmethod + def set_hooks_mode(cls, mode): + assert mode is cls.ALLOW_ALL or mode is cls.DENY_ALL + oldmode = cls.hooks_mode + cls.hooks_mode = mode + return oldmode + + @classmethod + def disable_hook_category(cls, *categories): + changes = set() + if cls.hooks_mode is cls.DENY_ALL: + for category in categories: + if category in cls.enabled_hooks_categories: + cls.enabled_hooks_categories.remove(category) + changes.add(category) + else: + for category in categories: + if category not in cls.disabled_hooks_categories: + cls.disabled_hooks_categories.add(category) + changes.add(category) + return changes + + @classmethod + def enable_hook_category(cls, *categories): + changes = set() + if cls.hooks_mode is cls.DENY_ALL: + for category in categories: + if category not in cls.enabled_hooks_categories: + cls.enabled_hooks_categories.add(category) + changes.add(category) + else: + for category in categories: + if category in cls.disabled_hooks_categories: + cls.disabled_hooks_categories.remove(category) + changes.add(category) + return changes + + @classmethod + def is_hook_activated(cls, hook): + if cls.hooks_mode is cls.DENY_ALL: + return hook.category in cls.enabled_hooks_categories + return hook.category not in cls.disabled_hooks_categories # should some hooks be deactivated during [pre|post]create script execution free_wheel = False @@ -259,20 +301,6 @@ """pyro is always enabled in standalone repository configuration""" return True - def load_hooks(self, vreg): - hooks = {} - try: - apphookdefs = vreg['hooks'].all_objects() - except RegistryNotFound: - return hooks - for hookdef in apphookdefs: - for event, ertype in hookdef.register_to(): - if ertype == 'Any': - ertype = '' - cb = hookdef.make_callback(event) - hooks.setdefault(event, {}).setdefault(ertype, []).append(cb) - return hooks - def load_schema(self, expand_cubes=False, **kwargs): from cubicweb.schema import CubicWebSchemaLoader if expand_cubes: diff -r 15d541321a8c -r 74c1597f8a82 server/serverctl.py --- a/server/serverctl.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/serverctl.py Wed Jan 20 10:13:45 2010 +0100 @@ -14,9 +14,8 @@ from logilab.common.clcommands import register_commands, cmd_run, pop_arg from logilab.common.shellutils import ASK -from cubicweb import (AuthenticationError, ExecutionError, ConfigurationError, - underline_title) -from cubicweb.toolsutils import Command, CommandHandler +from cubicweb import AuthenticationError, ExecutionError, ConfigurationError +from cubicweb.toolsutils import Command, CommandHandler, underline_title from cubicweb.server import SOURCE_TYPES from cubicweb.server.utils import ask_source_config from cubicweb.server.serverconfig import (USER_OPTIONS, ServerConfiguration, @@ -115,7 +114,7 @@ login, pwd = manager_userpasswd() while True: try: - return in_memory_cnx(config, login, pwd) + return in_memory_cnx(config, login, password=pwd) except AuthenticationError: print '-> Error: wrong user/password.' 
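The six per-category booleans are thus replaced by two class-level sets plus an ALLOW_ALL/DENY_ALL mode, and is_hook_activated() is what the hooks machinery consults before running a hook. A sketch of the intended usage, e.g. from a test or migration helper that wants to suspend most hooks for a while (the helper itself and the 'integrity' category name are only examples):

    def call_with_hooks_denied(config, func, *args, **kwargs):
        """run func with hooks denied by default, keeping one category enabled"""
        oldmode = config.set_hooks_mode(config.DENY_ALL)
        changes = config.enable_hook_category('integrity')
        try:
            return func(*args, **kwargs)
        finally:
            # still in DENY_ALL mode here, so this removes the categories we added
            if changes:
                config.disable_hook_category(*changes)
            config.set_hooks_mode(oldmode)

Returning the set of actually-changed categories from enable/disable_hook_category is what makes this kind of clean rollback possible.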
# reset cubes else we'll have an assertion error on next retry diff -r 15d541321a8c -r 74c1597f8a82 server/session.py --- a/server/session.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/session.py Wed Jan 20 10:13:45 2010 +0100 @@ -15,7 +15,8 @@ from rql.nodes import VariableRef, Function, ETYPE_PYOBJ_MAP, etype_from_pyobj from yams import BASE_TYPES -from cubicweb import RequestSessionMixIn, Binary, UnknownEid +from cubicweb import Binary, UnknownEid +from cubicweb.req import RequestSessionBase from cubicweb.dbapi import ConnectionProperties from cubicweb.utils import make_uid from cubicweb.rqlrewrite import RQLRewriter @@ -41,7 +42,7 @@ return description -class Session(RequestSessionMixIn): +class Session(RequestSessionBase): """tie session id, user, connections pool and other session data all together """ @@ -74,10 +75,6 @@ return '<%ssession %s (%s 0x%x)>' % (self.cnxtype, self.user.login, self.id, id(self)) - @property - def schema(self): - return self.repo.schema - def hijack_user(self, user): """return a fake request/session using specified user""" session = Session(user, self.repo) @@ -165,10 +162,10 @@ rset.description = list(rset.description) rset.description.append([self.describe(targeteid)[0]]) targetentity = self.entity_from_eid(targeteid) - if targetentity.rset is None: - targetentity.rset = rset - targetentity.row = rset.rowcount - targetentity.col = 0 + if targetentity.cw_rset is None: + targetentity.cw_rset = rset + targetentity.cw_row = rset.rowcount + targetentity.cw_col = 0 rset.rowcount += 1 entities.append(targetentity) entity._related_cache['%s_%s' % (rtype, role)] = (rset, tuple(entities)) @@ -235,6 +232,19 @@ assert prop == 'lang' # this is the only one changeable property for now self.set_language(value) + def deleted_in_transaction(self, eid): + return eid in self.transaction_data.get('pendingeids', ()) + + def added_in_transaction(self, eid): + return eid in self.transaction_data.get('neweids', ()) + + def schema_rproperty(self, rtype, eidfrom, eidto, rprop): + rschema = self.repo.schema[rtype] + subjtype = self.describe(eidfrom)[0] + objtype = self.describe(eidto)[0] + rdef = rschema.rdef(subjtype, objtype) + return rdef.get(rprop) + # connection management ################################################### def keep_pool_mode(self, mode): @@ -332,6 +342,11 @@ # request interface ####################################################### + @property + def cursor(self): + """return a rql cursor""" + return self + def set_entity_cache(self, entity): # XXX session level caching may be a pb with multiple repository # instances, but 1. this is probably not the only one :$ and 2. 
it @@ -418,11 +433,6 @@ return self.super_session.execute(rql, kwargs, eid_key, build_descr, propagate) - @property - def cursor(self): - """return a rql cursor""" - return self - def execute(self, rql, kwargs=None, eid_key=None, build_descr=True, propagate=False): """db-api like method directly linked to the querier execute method @@ -548,7 +558,6 @@ self._threaddata.pending_operations = [] return self._threaddata.pending_operations - def add_operation(self, operation, index=None): """add an observer""" assert self.commit_state != 'commit' @@ -628,12 +637,19 @@ description.append(tuple(row_descr)) return description - @deprecated("use vreg['etypes'].etype_class(etype)") + # deprecated ############################################################### + + @property + @deprecated("[3.6] use session.vreg.schema") + def schema(self): + return self.repo.schema + + @deprecated("[3.4] use vreg['etypes'].etype_class(etype)") def etype_class(self, etype): """return an entity class for the given entity type""" return self.vreg['etypes'].etype_class(etype) - @deprecated('use direct access to session.transaction_data') + @deprecated('[3.4] use direct access to session.transaction_data') def query_data(self, key, default=None, setdefault=False, pop=False): if setdefault: assert not pop @@ -643,7 +659,7 @@ else: return self.transaction_data.get(key, default) - @deprecated('use entity_from_eid(eid, etype=None)') + @deprecated('[3.4] use entity_from_eid(eid, etype=None)') def entity(self, eid): """return a result set for the given eid""" return self.entity_from_eid(eid) @@ -661,6 +677,7 @@ # session which has created this one self.parent_session = parent_session self.user = InternalManager() + self.user.req = self # XXX remove when "vreg = user.req.vreg" hack in entity.py is gone self.repo = parent_session.repo self.vreg = parent_session.vreg self.data = parent_session.data @@ -730,8 +747,9 @@ """special session created internaly by the repository""" def __init__(self, repo, cnxprops=None): - super(InternalSession, self).__init__(_IMANAGER, repo, cnxprops, + super(InternalSession, self).__init__(InternalManager(), repo, cnxprops, _id='internal') + self.user.req = self # XXX remove when "vreg = user.req.vreg" hack in entity.py is gone self.cnxtype = 'inmemory' self.is_internal_session = True self.is_super_session = True @@ -768,7 +786,6 @@ return 'en' return None -_IMANAGER = InternalManager() from logging import getLogger from cubicweb import set_log_methods diff -r 15d541321a8c -r 74c1597f8a82 server/sources/__init__.py --- a/server/sources/__init__.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/sources/__init__.py Wed Jan 20 10:13:45 2010 +0100 @@ -291,7 +291,7 @@ """ pass - def authenticate(self, session, login, password): + def authenticate(self, session, login, **kwargs): """if the source support CWUser entity type, it should implements this method which should return CWUser eid for the given login/password if this account is defined in this source and valid login / password is diff -r 15d541321a8c -r 74c1597f8a82 server/sources/ldapuser.py --- a/server/sources/ldapuser.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/sources/ldapuser.py Wed Jan 20 10:13:45 2010 +0100 @@ -237,14 +237,15 @@ self._connect() return ConnectionWrapper(self._conn) - def authenticate(self, session, login, password): + def authenticate(self, session, login, password=None, **kwargs): """return CWUser eid for the given login/password if this account is defined in this source, else raise `AuthenticationError` two queries are needed 
since passwords are stored crypted, so we have to fetch the salt first """ - assert login, 'no login!' + if password is None: + raise AuthenticationError() searchfilter = [filter_format('(%s=%s)', (self.user_login_attr, login))] searchfilter.extend([filter_format('(%s=%s)', ('objectClass', o)) for o in self.user_classes]) diff -r 15d541321a8c -r 74c1597f8a82 server/sources/native.py --- a/server/sources/native.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/sources/native.py Wed Jan 20 10:13:45 2010 +0100 @@ -96,11 +96,6 @@ """adapter for source using the native cubicweb schema (see below) """ sqlgen_class = SQLGenerator - - passwd_rql = "Any P WHERE X is CWUser, X login %(login)s, X upassword P" - auth_rql = "Any X WHERE X is CWUser, X login %(login)s, X upassword %(pwd)s" - _sols = ({'X': 'CWUser', 'P': 'Password'},) - options = ( ('db-driver', {'type' : 'string', @@ -148,6 +143,7 @@ def __init__(self, repo, appschema, source_config, *args, **kwargs): SQLAdapterMixIn.__init__(self, source_config) + self.authentifiers = [LoginPasswordAuthentifier(self)] AbstractSource.__init__(self, repo, appschema, source_config, *args, **kwargs) # sql generator @@ -182,6 +178,11 @@ # consuming, find another way return SQLAdapterMixIn.get_connection(self) + def add_authentifier(self, authentifier): + self.authentifiers.append(authentifier) + authentifier.source = self + authentifier.set_schema(self.schema) + def reset_caches(self): """method called during test to reset potential source caches""" self._cache = Cache(self.repo.config['rql-cache-size']) @@ -232,10 +233,10 @@ # ISource interface ####################################################### - def compile_rql(self, rql): + def compile_rql(self, rql, sols): rqlst = self.repo.vreg.rqlhelper.parse(rql) rqlst.restricted_vars = () - rqlst.children[0].solutions = self._sols + rqlst.children[0].solutions = sols self.repo.querier.sqlgen_annotate(rqlst) set_qdata(self.schema.rschema, rqlst, ()) return rqlst @@ -249,10 +250,8 @@ self._rql_sqlgen.schema = schema except AttributeError: pass # __init__ - if 'CWUser' in schema: # probably an empty schema if not true... - # rql syntax trees used to authenticate users - self._passwd_rqlst = self.compile_rql(self.passwd_rql) - self._auth_rqlst = self.compile_rql(self.auth_rql) + for authentifier in self.authentifiers: + authentifier.set_schema(self.schema) def support_entity(self, etype, write=False): """return true if the given entity's type is handled by this adapter @@ -273,30 +272,16 @@ def may_cross_relation(self, rtype): return True - def authenticate(self, session, login, password): - """return CWUser eid for the given login/password if this account is - defined in this source, else raise `AuthenticationError` - - two queries are needed since passwords are stored crypted, so we have - to fetch the salt first + def authenticate(self, session, login, **kwargs): + """return CWUser eid for the given login and other authentication + information found in kwargs, else raise `AuthenticationError` """ - args = {'login': login, 'pwd' : password} - if password is not None: - rset = self.syntax_tree_search(session, self._passwd_rqlst, args) + for authentifier in self.authentifiers: try: - pwd = rset[0][0] - except IndexError: - raise AuthenticationError('bad login') - # passwords are stored using the Bytes type, so we get a StringIO - if pwd is not None: - args['pwd'] = Binary(crypt_password(password, pwd.getvalue()[:2])) - # get eid from login and (crypted) password - # XXX why not simply compare password? 
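The login/password logic being removed here does not go away: it moves into LoginPasswordAuthentifier at the end of this file, and the native source now simply tries each registered authentifier in turn until one of them returns a user eid. Other code can plug an extra checker through add_authentifier(); a purely hypothetical sketch (TokenAuthentifier and its helper methods are invented for illustration):

    from cubicweb import AuthenticationError
    from cubicweb.server.sources.native import BaseAuthentifier

    class TokenAuthentifier(BaseAuthentifier):
        """check a pre-shared token passed along with the login"""
        def authenticate(self, session, login, token=None, **kwargs):
            if token is None or not self._token_is_valid(login, token):
                raise AuthenticationError('bad token')
            return self._user_eid(session, login)   # hypothetical helpers

    # registered once the native source exists, for instance at server startup:
    #   repo.system_source.add_authentifier(TokenAuthentifier())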
- rset = self.syntax_tree_search(session, self._auth_rqlst, args) - try: - return rset[0][0] - except IndexError: - raise AuthenticationError('bad password') + return authentifier.authenticate(session, login, **kwargs) + except AuthenticationError: + continue + raise AuthenticationError() def syntax_tree_search(self, session, union, args=None, cachekey=None, varmap=None): @@ -534,7 +519,7 @@ if extid is not None: assert isinstance(extid, str) extid = b64encode(extid) - attrs = {'type': entity.id, 'eid': entity.eid, 'extid': extid, + attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': extid, 'source': source.uri, 'mtime': datetime.now()} session.system_sql(self.sqlgen.insert('entities', attrs), attrs) @@ -644,3 +629,49 @@ result += 'GRANT ALL ON deleted_entities TO %s;\n' % user result += 'GRANT ALL ON entities_id_seq TO %s;\n' % user return result + + +class BaseAuthentifier(object): + + def __init__(self, source=None): + self.source = source + + def set_schema(self, schema): + """set the instance'schema""" + pass + +class LoginPasswordAuthentifier(BaseAuthentifier): + passwd_rql = "Any P WHERE X is CWUser, X login %(login)s, X upassword P" + auth_rql = "Any X WHERE X is CWUser, X login %(login)s, X upassword %(pwd)s" + _sols = ({'X': 'CWUser', 'P': 'Password'},) + + def set_schema(self, schema): + """set the instance'schema""" + if 'CWUser' in schema: # probably an empty schema if not true... + # rql syntax trees used to authenticate users + self._passwd_rqlst = self.source.compile_rql(self.passwd_rql, self._sols) + self._auth_rqlst = self.source.compile_rql(self.auth_rql, self._sols) + + def authenticate(self, session, login, password=None, **kwargs): + """return CWUser eid for the given login/password if this account is + defined in this source, else raise `AuthenticationError` + + two queries are needed since passwords are stored crypted, so we have + to fetch the salt first + """ + args = {'login': login, 'pwd' : password} + if password is not None: + rset = self.source.syntax_tree_search(session, self._passwd_rqlst, args) + try: + pwd = rset[0][0] + except IndexError: + raise AuthenticationError('bad login') + # passwords are stored using the Bytes type, so we get a StringIO + if pwd is not None: + args['pwd'] = Binary(crypt_password(password, pwd.getvalue()[:2])) + # get eid from login and (crypted) password + rset = self.source.syntax_tree_search(session, self._auth_rqlst, args) + try: + return rset[0][0] + except IndexError: + raise AuthenticationError('bad password') diff -r 15d541321a8c -r 74c1597f8a82 server/sqlutils.py --- a/server/sqlutils.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/sqlutils.py Wed Jan 20 10:13:45 2010 +0100 @@ -22,7 +22,7 @@ from cubicweb import Binary, ConfigurationError from cubicweb.utils import todate, todatetime -from cubicweb.common.uilib import remove_html_tags +from cubicweb.uilib import remove_html_tags from cubicweb.toolsutils import restrict_perms_to_user from cubicweb.schema import PURE_VIRTUAL_RTYPES from cubicweb.server import SQL_CONNECT_HOOKS diff -r 15d541321a8c -r 74c1597f8a82 server/ssplanner.py --- a/server/ssplanner.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/ssplanner.py Wed Jan 20 10:13:45 2010 +0100 @@ -376,6 +376,7 @@ def execute(self): """execute this step""" + results = self.execute_child() todelete = frozenset(typed_eid(eid) for eid, in self.execute_child()) session = self.plan.session delete = session.repo.glob_delete_entity @@ -385,7 +386,7 @@ pending |= actual for eid in actual: delete(session, eid) - + return 
results class DeleteRelationsStep(Step): """step consisting in deleting relations""" diff -r 15d541321a8c -r 74c1597f8a82 server/test/data/hooks.py --- a/server/test/data/hooks.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/data/hooks.py Wed Jan 20 10:13:45 2010 +0100 @@ -5,27 +5,31 @@ :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ -from cubicweb.server.hooksmanager import SystemHook +from cubicweb.server.hook import Hook CALLED_EVENTS = {} -class StartupHook(SystemHook): +class StartupHook(Hook): + __regid__ = 'mystartup' events = ('server_startup',) - def call(self, repo): + def __call__(self): CALLED_EVENTS['server_startup'] = True -class ShutdownHook(SystemHook): +class ShutdownHook(Hook): + __regid__ = 'myshutdown' events = ('server_shutdown',) - def call(self, repo): + def __call__(self): CALLED_EVENTS['server_shutdown'] = True -class LoginHook(SystemHook): +class LoginHook(Hook): + __regid__ = 'mylogin' events = ('session_open',) - def call(self, session): - CALLED_EVENTS['session_open'] = session.user.login + def __call__(self): + CALLED_EVENTS['session_open'] = self._cw.user.login -class LogoutHook(SystemHook): +class LogoutHook(Hook): + __regid__ = 'mylogout' events = ('session_close',) - def call(self, session): - CALLED_EVENTS['session_close'] = session.user.login + def __call__(self): + CALLED_EVENTS['session_close'] = self._cw.user.login diff -r 15d541321a8c -r 74c1597f8a82 server/test/data/migratedapp/schema.py --- a/server/test/data/migratedapp/schema.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/data/migratedapp/schema.py Wed Jan 20 10:13:45 2010 +0100 @@ -13,7 +13,7 @@ ERQLExpression, RRQLExpression) class Affaire(EntityType): - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), 'update': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), @@ -27,7 +27,7 @@ concerne = SubjectRelation('Societe') class concerne(RelationType): - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers', RRQLExpression('U has_update_permission S')), 'delete': ('managers', RRQLExpression('O owned_by U')), @@ -42,7 +42,7 @@ class Note(Para): __specializes_schema__ = True - permissions = {'read': ('managers', 'users', 'guests',), + __permissions__ = {'read': ('managers', 'users', 'guests',), 'update': ('managers', 'owners',), 'delete': ('managers', ), 'add': ('managers', @@ -63,7 +63,7 @@ summary = String(maxsize=512) class ecrit_par(RelationType): - permissions = {'read': ('managers', 'users', 'guests',), + __permissions__ = {'read': ('managers', 'users', 'guests',), 'delete': ('managers', ), 'add': ('managers', RRQLExpression('O require_permission P, P name "add_note", ' @@ -105,7 +105,7 @@ connait = SubjectRelation('Personne', symetric=True) class Societe(WorkflowableEntityType): - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'update': ('managers', 'owners'), 'delete': ('managers', 'owners'), diff -r 15d541321a8c -r 74c1597f8a82 server/test/data/migration/postcreate.py --- a/server/test/data/migration/postcreate.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/data/migration/postcreate.py Wed Jan 20 10:13:45 2010 +0100 @@ -11,7 +11,7 @@ done = wf.add_state(u'done') wf.add_transition(u'redoit', done, todo) wf.add_transition(u'markasdone', todo, done) -checkpoint() +commit() wf = 
add_workflow(u'affaire workflow', 'Affaire') pitetre = wf.add_state(u'pitetre', initial=True) @@ -21,5 +21,5 @@ wf.add_transition(u'abort', pitetre, bennon) wf.add_transition(u'start', pitetre, encours) wf.add_transition(u'end', encours, finie) -checkpoint() +commit() diff -r 15d541321a8c -r 74c1597f8a82 server/test/data/schema.py --- a/server/test/data/schema.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/data/schema.py Wed Jan 20 10:13:45 2010 +0100 @@ -13,7 +13,7 @@ ERQLExpression, RRQLExpression) class Affaire(WorkflowableEntityType): - permissions = { + __permissions__ = { 'read': ('managers', ERQLExpression('X owned_by U'), ERQLExpression('X concerne S?, S owned_by U')), 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), @@ -39,7 +39,7 @@ class Societe(EntityType): - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'update': ('managers', 'owners', ERQLExpression('U login L, X nom L')), 'delete': ('managers', 'owners', ERQLExpression('U login L, X nom L')), @@ -121,28 +121,28 @@ symetric = True class concerne(RelationType): - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers', RRQLExpression('U has_update_permission S')), 'delete': ('managers', RRQLExpression('O owned_by U')), } class travaille(RelationType): - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers', RRQLExpression('U has_update_permission S')), 'delete': ('managers', RRQLExpression('O owned_by U')), } class para(RelationType): - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'add': ('managers', ERQLExpression('X in_state S, S name "todo"')), 'delete': ('managers', ERQLExpression('X in_state S, S name "todo"')), } class test(RelationType): - permissions = {'read': ('managers', 'users', 'guests'), + __permissions__ = {'read': ('managers', 'users', 'guests'), 'delete': ('managers',), 'add': ('managers',)} diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_checkintegrity.py --- a/server/test/unittest_checkintegrity.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_checkintegrity.py Wed Jan 20 10:13:45 2010 +0100 @@ -13,7 +13,7 @@ from cubicweb.server.checkintegrity import check -repo, cnx = init_test_database('sqlite') +repo, cnx = init_test_database() class CheckIntegrityTC(TestCase): def test(self): diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_hook.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/unittest_hook.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- +"""unit/functional tests for cubicweb.server.hook + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" + +from logilab.common.testlib import TestCase, unittest_main, mock_object + + +from cubicweb.devtools import TestServerConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.selectors import implements +from cubicweb.server import hook +from cubicweb.hooks import integrity, syncschema + + +def clean_session_ops(func): + def wrapper(self, *args, **kwargs): + try: + return func(self, *args, **kwargs) + finally: + self.session.pending_operations[:] = [] + return wrapper + +class OperationsTC(CubicWebTC): + + def setUp(self): + CubicWebTC.setUp(self) + self.hm = self.repo.hm + + @clean_session_ops + def test_late_operation(self): + session = self.session + l1 = hook.LateOperation(session) + l2 = hook.LateOperation(session) + l3 = hook.Operation(session) + self.assertEquals(session.pending_operations, [l3, l1, l2]) + + @clean_session_ops + def test_single_last_operation(self): + session = self.session + l0 = hook.SingleLastOperation(session) + l1 = hook.LateOperation(session) + l2 = hook.LateOperation(session) + l3 = hook.Operation(session) + self.assertEquals(session.pending_operations, [l3, l1, l2, l0]) + l4 = hook.SingleLastOperation(session) + self.assertEquals(session.pending_operations, [l3, l1, l2, l4]) + + @clean_session_ops + def test_global_operation_order(self): + session = self.session + op1 = integrity._DelayedDeleteOp(session) + op2 = syncschema.MemSchemaRDefDel(session) + # equivalent operation generated by op2 but replace it here by op3 so we + # can check the result... + op3 = syncschema.MemSchemaNotifyChanges(session) + op4 = integrity._DelayedDeleteOp(session) + op5 = integrity._CheckORelationOp(session) + self.assertEquals(session.pending_operations, [op1, op2, op4, op5, op3]) + + +class HookCalled(Exception): pass + +config = TestServerConfiguration('data') +config.bootstrap_cubes() +schema = config.load_schema() + +class AddAnyHook(hook.Hook): + __regid__ = 'addany' + category = 'cat1' + events = ('before_add_entity',) + def __call__(self): + raise HookCalled() + + +class HooksManagerTC(TestCase): + + def setUp(self): + """ called before each test from this class """ + self.vreg = mock_object(config=config, schema=schema) + self.o = hook.HooksRegistry(self.vreg) + + def test_register_bad_hook1(self): + class _Hook(hook.Hook): + events = ('before_add_entiti',) + ex = self.assertRaises(Exception, self.o.register, _Hook) + self.assertEquals(str(ex), 'bad event before_add_entiti on unittest_hook._Hook') + + def test_register_bad_hook2(self): + class _Hook(hook.Hook): + events = None + ex = self.assertRaises(Exception, self.o.register, _Hook) + self.assertEquals(str(ex), 'bad .events attribute None on unittest_hook._Hook') + + def test_register_bad_hook3(self): + class _Hook(hook.Hook): + events = 'before_add_entity' + ex = self.assertRaises(Exception, self.o.register, _Hook) + self.assertEquals(str(ex), 'bad event b on unittest_hook._Hook') + + def test_call_hook(self): + self.o.register(AddAnyHook) + cw = mock_object(vreg=self.vreg) + self.assertRaises(HookCalled, self.o.call_hooks, 'before_add_entity', cw) + self.o.call_hooks('before_delete_entity', cw) # nothing to call + config.disabled_hooks_categories.add('cat1') + self.o.call_hooks('before_add_entity', cw) # disabled hooks category, not called + config.disabled_hooks_categories.remove('cat1') + self.assertRaises(HookCalled, self.o.call_hooks, 
'before_add_entity', cw) + self.o.unregister(AddAnyHook) + self.o.call_hooks('before_add_entity', cw) # nothing to call + + +class SystemHooksTC(CubicWebTC): + + def test_startup_shutdown(self): + import hooks # cubicweb/server/test/data/hooks.py + self.assertEquals(hooks.CALLED_EVENTS['server_startup'], True) + # don't actually call repository.shutdown ! + self.repo.hm.call_hooks('server_shutdown', repo=self.repo) + self.assertEquals(hooks.CALLED_EVENTS['server_shutdown'], True) + + def test_session_open_close(self): + import hooks # cubicweb/server/test/data/hooks.py + cnx = self.login('anon') + self.assertEquals(hooks.CALLED_EVENTS['session_open'], 'anon') + cnx.close() + self.assertEquals(hooks.CALLED_EVENTS['session_close'], 'anon') + + +# class RelationHookTC(TestCase): +# """testcase for relation hooks grouping""" +# def setUp(self): +# """ called before each test from this class """ +# self.o = HooksManager(schema) +# self.called = [] + +# def test_before_add_relation(self): +# """make sure before_xxx_relation hooks are called directly""" +# self.o.register(self._before_relation_hook, +# 'before_add_relation', 'concerne') +# self.assertEquals(self.called, []) +# self.o.call_hooks('before_add_relation', 'concerne', 'USER', +# 1, 'concerne', 2) +# self.assertEquals(self.called, [(1, 'concerne', 2)]) + +# def test_after_add_relation(self): +# """make sure after_xxx_relation hooks are deferred""" +# self.o.register(self._after_relation_hook, +# 'after_add_relation', 'concerne') +# self.assertEquals(self.called, []) +# self.o.call_hooks('after_add_relation', 'concerne', 'USER', +# 1, 'concerne', 2) +# self.o.call_hooks('after_add_relation', 'concerne', 'USER', +# 3, 'concerne', 4) +# self.assertEquals(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) + +# def test_before_delete_relation(self): +# """make sure before_xxx_relation hooks are called directly""" +# self.o.register(self._before_relation_hook, +# 'before_delete_relation', 'concerne') +# self.assertEquals(self.called, []) +# self.o.call_hooks('before_delete_relation', 'concerne', 'USER', +# 1, 'concerne', 2) +# self.assertEquals(self.called, [(1, 'concerne', 2)]) + +# def test_after_delete_relation(self): +# """make sure after_xxx_relation hooks are deferred""" +# self.o.register(self._after_relation_hook, +# 'after_delete_relation', 'concerne') +# self.o.call_hooks('after_delete_relation', 'concerne', 'USER', +# 1, 'concerne', 2) +# self.o.call_hooks('after_delete_relation', 'concerne', 'USER', +# 3, 'concerne', 4) +# self.assertEquals(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) + + +# def _before_relation_hook(self, pool, subject, r_type, object): +# self.called.append((subject, r_type, object)) + +# def _after_relation_hook(self, pool, subject, r_type, object): +# self.called.append((subject, r_type, object)) + + +if __name__ == '__main__': + unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_hookhelper.py --- a/server/test/unittest_hookhelper.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -"""unit/functional tests for cubicweb.server.hookhelper - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" - -from logilab.common.testlib import unittest_main -from cubicweb.devtools.apptest import RepositoryBasedTC - -from cubicweb.server.pool import LateOperation, Operation, SingleLastOperation -from cubicweb.server.hookhelper import * - - -class HookHelpersTC(RepositoryBasedTC): - - def setUp(self): - RepositoryBasedTC.setUp(self) - self.hm = self.repo.hm - - def test_late_operation(self): - session = self.session - l1 = LateOperation(session) - l2 = LateOperation(session) - l3 = Operation(session) - self.assertEquals(session.pending_operations, [l3, l1, l2]) - - def test_single_last_operation(self): - session = self.session - l0 = SingleLastOperation(session) - l1 = LateOperation(session) - l2 = LateOperation(session) - l3 = Operation(session) - self.assertEquals(session.pending_operations, [l3, l1, l2, l0]) - l4 = SingleLastOperation(session) - self.assertEquals(session.pending_operations, [l3, l1, l2, l4]) - - def test_global_operation_order(self): - from cubicweb.server import hooks, schemahooks - session = self.session - op1 = hooks.DelayedDeleteOp(session) - op2 = schemahooks.MemSchemaRDefDel(session) - # equivalent operation generated by op2 but replace it here by op3 so we - # can check the result... - op3 = schemahooks.MemSchemaNotifyChanges(session) - op4 = hooks.DelayedDeleteOp(session) - op5 = hooks.CheckORelationOp(session) - self.assertEquals(session.pending_operations, [op1, op2, op4, op5, op3]) - -if __name__ == '__main__': - unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_hooks.py --- a/server/test/unittest_hooks.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,516 +0,0 @@ -# -*- coding: utf-8 -*- -"""functional tests for core hooks - -note: most schemahooks.py hooks are actually tested in unittest_migrations.py -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" - -from logilab.common.testlib import TestCase, unittest_main - -from datetime import datetime - -from cubicweb import (ConnectionError, RepositoryError, ValidationError, - AuthenticationError, BadConnectionId) -from cubicweb.devtools.apptest import RepositoryBasedTC, get_versions - -from cubicweb.server.sqlutils import SQL_PREFIX -from cubicweb.server.repository import Repository - -orig_get_versions = Repository.get_versions - -def setup_module(*args): - Repository.get_versions = get_versions - -def teardown_module(*args): - Repository.get_versions = orig_get_versions - - - -class CoreHooksTC(RepositoryBasedTC): - - def test_delete_internal_entities(self): - self.assertRaises(RepositoryError, self.execute, - 'DELETE CWEType X WHERE X name "CWEType"') - self.assertRaises(RepositoryError, self.execute, - 'DELETE CWRType X WHERE X name "relation_type"') - self.assertRaises(RepositoryError, self.execute, - 'DELETE CWGroup X WHERE X name "owners"') - - def test_delete_required_relations_subject(self): - self.execute('INSERT CWUser X: X login "toto", X upassword "hop", X in_group Y ' - 'WHERE Y name "users"') - self.commit() - self.execute('DELETE X in_group Y WHERE X login "toto", Y name "users"') - self.assertRaises(ValidationError, self.commit) - self.execute('DELETE X in_group Y WHERE X login "toto"') - self.execute('SET X in_group Y WHERE X login "toto", Y name "guests"') - self.commit() - - def test_delete_required_relations_object(self): - self.skip('no sample in the 
schema ! YAGNI ? Kermaat ?') - - def test_static_vocabulary_check(self): - self.assertRaises(ValidationError, - self.execute, - 'SET X composite "whatever" WHERE X from_entity FE, FE name "CWUser", X relation_type RT, RT name "in_group"') - - def test_missing_required_relations_subject_inline(self): - # missing in_group relation - self.execute('INSERT CWUser X: X login "toto", X upassword "hop"') - self.assertRaises(ValidationError, - self.commit) - - def test_inlined(self): - self.assertEquals(self.repo.schema['sender'].inlined, True) - self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') - eeid = self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart')[0][0] - self.execute('SET X sender Y WHERE X is Email, Y is EmailAddress') - rset = self.execute('Any S WHERE X sender S, X eid %s' % eeid) - self.assertEquals(len(rset), 1) - - def test_composite_1(self): - self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') - self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart') - self.failUnless(self.execute('Email X WHERE X sender Y')) - self.commit() - self.execute('DELETE Email X') - rset = self.execute('Any X WHERE X is EmailPart') - self.assertEquals(len(rset), 1) - self.commit() - rset = self.execute('Any X WHERE X is EmailPart') - self.assertEquals(len(rset), 0) - - def test_composite_2(self): - self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') - self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart') - self.commit() - self.execute('DELETE Email X') - self.execute('DELETE EmailPart X') - self.commit() - rset = self.execute('Any X WHERE X is EmailPart') - self.assertEquals(len(rset), 0) - - def test_composite_redirection(self): - self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') - self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart') - self.execute('INSERT Email X: X messageid "<2345>", X subject "test2", X sender Y, X recipients Y ' - 'WHERE Y is EmailAddress') - self.commit() - self.execute('DELETE X parts Y WHERE X messageid "<1234>"') - self.execute('SET X parts Y WHERE X messageid "<2345>"') - self.commit() - rset = self.execute('Any X WHERE X is EmailPart') - self.assertEquals(len(rset), 1) - self.assertEquals(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>') - - def test_unsatisfied_constraints(self): - self.execute('INSERT CWRelation X: X from_entity FE, X relation_type RT, X to_entity TE ' - 'WHERE FE name "Affaire", RT name "concerne", TE name "String"') - self.assertRaises(ValidationError, - self.commit) - - - def test_html_tidy_hook(self): - entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr 
"yo"').get_entity(0, 0) - self.assertEquals(entity.descr, u'yo') - entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr "yo"').get_entity(0, 0) - self.assertEquals(entity.descr, u'yo') - entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr "yo"').get_entity(0, 0) - self.assertEquals(entity.descr, u'yo') - entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr "R&D"').get_entity(0, 0) - self.assertEquals(entity.descr, u'R&D') - xml = u"
    c'est l'été" - entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr %(d)s', - {'d': xml}).get_entity(0, 0) - self.assertEquals(entity.descr, u"
    c'est l'été
    ") - - def test_nonregr_html_tidy_hook_no_update(self): - entity = self.execute('INSERT Affaire A: A descr_format "text/html", A descr "yo"').get_entity(0, 0) - self.assertEquals(entity.descr, u'yo') - self.execute('SET A ref "REF" WHERE A eid %s' % entity.eid) - entity = self.execute('Any A WHERE A eid %s' % entity.eid).get_entity(0, 0) - self.assertEquals(entity.descr, u'yo') - self.execute('SET A descr "R&D

    yo" WHERE A eid %s' % entity.eid) - entity = self.execute('Any A WHERE A eid %s' % entity.eid).get_entity(0, 0) - self.assertEquals(entity.descr, u'R&D

    yo

    ') - - - def test_metadata_cwuri(self): - eid = self.execute('INSERT Note X')[0][0] - cwuri = self.execute('Any U WHERE X eid %s, X cwuri U' % eid)[0][0] - self.assertEquals(cwuri, self.repo.config['base-url'] + 'eid/%s' % eid) - - def test_metadata_creation_modification_date(self): - _now = datetime.now() - eid = self.execute('INSERT Note X')[0][0] - creation_date, modification_date = self.execute('Any CD, MD WHERE X eid %s, ' - 'X creation_date CD, ' - 'X modification_date MD' % eid)[0] - self.assertEquals((creation_date - _now).seconds, 0) - self.assertEquals((modification_date - _now).seconds, 0) - - def test_metadata__date(self): - _now = datetime.now() - eid = self.execute('INSERT Note X')[0][0] - creation_date = self.execute('Any D WHERE X eid %s, X creation_date D' % eid)[0][0] - self.assertEquals((creation_date - _now).seconds, 0) - - def test_metadata_created_by(self): - eid = self.execute('INSERT Note X')[0][0] - self.commit() # fire operations - rset = self.execute('Any U WHERE X eid %s, X created_by U' % eid) - self.assertEquals(len(rset), 1) # make sure we have only one creator - self.assertEquals(rset[0][0], self.session.user.eid) - - def test_metadata_owned_by(self): - eid = self.execute('INSERT Note X')[0][0] - self.commit() # fire operations - rset = self.execute('Any U WHERE X eid %s, X owned_by U' % eid) - self.assertEquals(len(rset), 1) # make sure we have only one owner - self.assertEquals(rset[0][0], self.session.user.eid) - - -class UserGroupHooksTC(RepositoryBasedTC): - - def test_user_synchronization(self): - self.create_user('toto', password='hop', commit=False) - self.assertRaises(AuthenticationError, - self.repo.connect, u'toto', 'hop') - self.commit() - cnxid = self.repo.connect(u'toto', 'hop') - self.failIfEqual(cnxid, self.cnxid) - self.execute('DELETE CWUser X WHERE X login "toto"') - self.repo.execute(cnxid, 'State X') - self.commit() - self.assertRaises(BadConnectionId, - self.repo.execute, cnxid, 'State X') - - def test_user_group_synchronization(self): - user = self.session.user - self.assertEquals(user.groups, set(('managers',))) - self.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid) - self.assertEquals(user.groups, set(('managers',))) - self.commit() - self.assertEquals(user.groups, set(('managers', 'guests'))) - self.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid) - self.assertEquals(user.groups, set(('managers', 'guests'))) - self.commit() - self.assertEquals(user.groups, set(('managers',))) - - def test_user_composite_owner(self): - ueid = self.create_user('toto') - # composite of euser should be owned by the euser regardless of who created it - self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", U use_email X ' - 'WHERE U login "toto"') - self.commit() - self.assertEquals(self.execute('Any A WHERE X owned_by U, U use_email X,' - 'U login "toto", X address A')[0][0], - 'toto@logilab.fr') - - def test_no_created_by_on_deleted_entity(self): - eid = self.execute('INSERT EmailAddress X: X address "toto@logilab.fr"')[0][0] - self.execute('DELETE EmailAddress X WHERE X eid %s' % eid) - self.commit() - self.failIf(self.execute('Any X WHERE X created_by Y, X eid >= %(x)s', {'x': eid})) - - -class CWPropertyHooksTC(RepositoryBasedTC): - - def test_unexistant_eproperty(self): - ex = self.assertRaises(ValidationError, - self.execute, 'INSERT CWProperty X: X pkey "bla.bla", X value "hop", X for_user U') - self.assertEquals(ex.errors, {'pkey': 'unknown property key'}) - ex = 
self.assertRaises(ValidationError, - self.execute, 'INSERT CWProperty X: X pkey "bla.bla", X value "hop"') - self.assertEquals(ex.errors, {'pkey': 'unknown property key'}) - - def test_site_wide_eproperty(self): - ex = self.assertRaises(ValidationError, - self.execute, 'INSERT CWProperty X: X pkey "ui.site-title", X value "hop", X for_user U') - self.assertEquals(ex.errors, {'for_user': "site-wide property can't be set for user"}) - - def test_bad_type_eproperty(self): - ex = self.assertRaises(ValidationError, - self.execute, 'INSERT CWProperty X: X pkey "ui.language", X value "hop", X for_user U') - self.assertEquals(ex.errors, {'value': u'unauthorized value'}) - ex = self.assertRaises(ValidationError, - self.execute, 'INSERT CWProperty X: X pkey "ui.language", X value "hop"') - self.assertEquals(ex.errors, {'value': u'unauthorized value'}) - - -class SchemaHooksTC(RepositoryBasedTC): - - def test_duplicate_etype_error(self): - # check we can't add a CWEType or CWRType entity if it already exists one - # with the same name - # - # according to hook order, we'll get a repository or validation error - self.assertRaises((ValidationError, RepositoryError), - self.execute, 'INSERT CWEType X: X name "Societe"') - self.assertRaises((ValidationError, RepositoryError), - self.execute, 'INSERT CWRType X: X name "in_group"') - - def test_validation_unique_constraint(self): - self.assertRaises(ValidationError, - self.execute, 'INSERT CWUser X: X login "admin"') - try: - self.execute('INSERT CWUser X: X login "admin"') - except ValidationError, ex: - self.assertIsInstance(ex.entity, int) - self.assertEquals(ex.errors, {'login': 'the value "admin" is already used, use another one'}) - - -class SchemaModificationHooksTC(RepositoryBasedTC): - - def setUp(self): - if not hasattr(self, '_repo'): - # first initialization - repo = self.repo # set by the RepositoryBasedTC metaclass - # force to read schema from the database to get proper eid set on schema instances - repo.config._cubes = None - repo.fill_schema() - RepositoryBasedTC.setUp(self) - - def index_exists(self, etype, attr, unique=False): - dbhelper = self.session.pool.source('system').dbhelper - sqlcursor = self.session.pool['system'] - return dbhelper.index_exists(sqlcursor, SQL_PREFIX + etype, SQL_PREFIX + attr, unique=unique) - - def test_base(self): - schema = self.repo.schema - dbhelper = self.session.pool.source('system').dbhelper - sqlcursor = self.session.pool['system'] - self.failIf(schema.has_entity('Societe2')) - self.failIf(schema.has_entity('concerne2')) - # schema should be update on insertion (after commit) - self.execute('INSERT CWEType X: X name "Societe2", X description "", X final FALSE') - self.execute('INSERT CWRType X: X name "concerne2", X description "", X final FALSE, X symetric FALSE') - self.failIf(schema.has_entity('Societe2')) - self.failIf(schema.has_entity('concerne2')) - self.execute('SET X read_permission G WHERE X is CWEType, X name "Societe2", G is CWGroup') - self.execute('SET X read_permission G WHERE X is CWRType, X name "concerne2", G is CWGroup') - self.execute('SET X add_permission G WHERE X is CWEType, X name "Societe2", G is CWGroup, G name "managers"') - self.execute('SET X add_permission G WHERE X is CWRType, X name "concerne2", G is CWGroup, G name "managers"') - self.execute('SET X delete_permission G WHERE X is CWEType, X name "Societe2", G is CWGroup, G name "owners"') - self.execute('SET X delete_permission G WHERE X is CWRType, X name "concerne2", G is CWGroup, G name "owners"') - # have to commit 
before adding definition relations - self.commit() - self.failUnless(schema.has_entity('Societe2')) - self.failUnless(schema.has_relation('concerne2')) - self.execute('INSERT CWAttribute X: X cardinality "11", X defaultval "noname", X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F ' - 'WHERE RT name "nom", E name "Societe2", F name "String"') - concerne2_rdef_eid = self.execute( - 'INSERT CWRelation X: X cardinality "**", X relation_type RT, X from_entity E, X to_entity E ' - 'WHERE RT name "concerne2", E name "Societe2"')[0][0] - self.execute('INSERT CWRelation X: X cardinality "?*", X relation_type RT, X from_entity E, X to_entity C ' - 'WHERE RT name "comments", E name "Societe2", C name "Comment"') - self.failIf('nom' in schema['Societe2'].subject_relations()) - self.failIf('concerne2' in schema['Societe2'].subject_relations()) - self.failIf(self.index_exists('Societe2', 'nom')) - self.commit() - self.failUnless('nom' in schema['Societe2'].subject_relations()) - self.failUnless('concerne2' in schema['Societe2'].subject_relations()) - self.failUnless(self.index_exists('Societe2', 'nom')) - # now we should be able to insert and query Societe2 - s2eid = self.execute('INSERT Societe2 X: X nom "logilab"')[0][0] - self.execute('Societe2 X WHERE X nom "logilab"') - self.execute('SET X concerne2 X WHERE X nom "logilab"') - rset = self.execute('Any X WHERE X concerne2 Y') - self.assertEquals(rset.rows, [[s2eid]]) - # check that when a relation definition is deleted, existing relations are deleted - self.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, X from_entity E, X to_entity E ' - 'WHERE RT name "concerne2", E name "Societe"') - self.commit() - self.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}, 'x') - self.commit() - self.failUnless('concerne2' in schema['Societe'].subject_relations()) - self.failIf('concerne2' in schema['Societe2'].subject_relations()) - self.failIf(self.execute('Any X WHERE X concerne2 Y')) - # schema should be cleaned on delete (after commit) - self.execute('DELETE CWEType X WHERE X name "Societe2"') - self.execute('DELETE CWRType X WHERE X name "concerne2"') - self.failUnless(self.index_exists('Societe2', 'nom')) - self.failUnless(schema.has_entity('Societe2')) - self.failUnless(schema.has_relation('concerne2')) - self.commit() - self.failIf(self.index_exists('Societe2', 'nom')) - self.failIf(schema.has_entity('Societe2')) - self.failIf(schema.has_entity('concerne2')) - - def test_is_instance_of_insertions(self): - seid = self.execute('INSERT SubDivision S: S nom "subdiv"')[0][0] - is_etypes = [etype for etype, in self.execute('Any ETN WHERE X eid %s, X is ET, ET name ETN' % seid)] - self.assertEquals(is_etypes, ['SubDivision']) - instanceof_etypes = [etype for etype, in self.execute('Any ETN WHERE X eid %s, X is_instance_of ET, ET name ETN' % seid)] - self.assertEquals(sorted(instanceof_etypes), ['Division', 'Societe', 'SubDivision']) - snames = [name for name, in self.execute('Any N WHERE S is Societe, S nom N')] - self.failIf('subdiv' in snames) - snames = [name for name, in self.execute('Any N WHERE S is Division, S nom N')] - self.failIf('subdiv' in snames) - snames = [name for name, in self.execute('Any N WHERE S is_instance_of Societe, S nom N')] - self.failUnless('subdiv' in snames) - snames = [name for name, in self.execute('Any N WHERE S is_instance_of Division, S nom N')] - self.failUnless('subdiv' in snames) - - - def test_perms_synchronization_1(self): - schema = self.repo.schema - 
self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers', 'users'))) - self.failUnless(self.execute('Any X, Y WHERE X is CWEType, X name "CWUser", Y is CWGroup, Y name "users"')[0]) - self.execute('DELETE X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"') - self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers', 'users', ))) - self.commit() - self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers', ))) - self.execute('SET X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"') - self.commit() - self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers', 'users',))) - - def test_perms_synchronization_2(self): - schema = self.repo.schema['in_group'] - self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests'))) - self.execute('DELETE X read_permission Y WHERE X is CWRType, X name "in_group", Y name "guests"') - self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests'))) - self.commit() - self.assertEquals(schema.get_groups('read'), set(('managers', 'users'))) - self.execute('SET X read_permission Y WHERE X is CWRType, X name "in_group", Y name "guests"') - self.assertEquals(schema.get_groups('read'), set(('managers', 'users'))) - self.commit() - self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests'))) - - def test_nonregr_user_edit_itself(self): - ueid = self.session.user.eid - groupeids = [eid for eid, in self.execute('CWGroup G WHERE G name in ("managers", "users")')] - self.execute('DELETE X in_group Y WHERE X eid %s' % ueid) - self.execute('SET X surname "toto" WHERE X eid %s' % ueid) - self.execute('SET X in_group Y WHERE X eid %s, Y name "managers"' % ueid) - self.commit() - eeid = self.execute('Any X WHERE X is CWEType, X name "CWEType"')[0][0] - self.execute('DELETE X read_permission Y WHERE X eid %s' % eeid) - self.execute('SET X final FALSE WHERE X eid %s' % eeid) - self.execute('SET X read_permission Y WHERE X eid %s, Y eid in (%s, %s)' - % (eeid, groupeids[0], groupeids[1])) - self.commit() - self.execute('Any X WHERE X is CWEType, X name "CWEType"') - - # schema modification hooks tests ######################################### - - def test_uninline_relation(self): - dbhelper = self.session.pool.source('system').dbhelper - sqlcursor = self.session.pool['system'] - # Personne inline2 Affaire inline - # insert a person without inline2 relation (not mandatory) - self.execute('INSERT Personne X: X nom "toto"') - peid = self.execute('INSERT Personne X: X nom "tutu"')[0][0] - aeid = self.execute('INSERT Affaire X: X ref "tata"')[0][0] - self.execute('SET X inline2 Y WHERE X eid %(x)s, Y eid %(y)s', {'x': peid, 'y': aeid}) - self.failUnless(self.schema['inline2'].inlined) - try: - try: - self.execute('SET X inlined FALSE WHERE X name "inline2"') - self.failUnless(self.schema['inline2'].inlined) - self.commit() - self.failIf(self.schema['inline2'].inlined) - self.failIf(self.index_exists('Personne', 'inline2')) - rset = self.execute('Any X, Y WHERE X inline2 Y') - self.assertEquals(len(rset), 1) - self.assertEquals(rset.rows[0], [peid, aeid]) - except: - import traceback - traceback.print_exc() - raise - finally: - self.execute('SET X inlined TRUE WHERE X name "inline2"') - self.failIf(self.schema['inline2'].inlined) - self.commit() - self.failUnless(self.schema['inline2'].inlined) - self.failUnless(self.index_exists('Personne', 'inline2')) - rset = self.execute('Any X, Y WHERE X inline2 Y') - 
self.assertEquals(len(rset), 1) - self.assertEquals(rset.rows[0], [peid, aeid]) - - def test_indexed_change(self): - dbhelper = self.session.pool.source('system').dbhelper - sqlcursor = self.session.pool['system'] - try: - self.execute('SET X indexed TRUE WHERE X relation_type R, R name "sujet"') - self.failIf(self.schema['sujet'].rproperty('Affaire', 'String', 'indexed')) - self.failIf(self.index_exists('Affaire', 'sujet')) - self.commit() - self.failUnless(self.schema['sujet'].rproperty('Affaire', 'String', 'indexed')) - self.failUnless(self.index_exists('Affaire', 'sujet')) - finally: - self.execute('SET X indexed FALSE WHERE X relation_type R, R name "sujet"') - self.failUnless(self.schema['sujet'].rproperty('Affaire', 'String', 'indexed')) - self.failUnless(self.index_exists('Affaire', 'sujet')) - self.commit() - self.failIf(self.schema['sujet'].rproperty('Affaire', 'String', 'indexed')) - self.failIf(self.index_exists('Affaire', 'sujet')) - - def test_unique_change(self): - dbhelper = self.session.pool.source('system').dbhelper - sqlcursor = self.session.pool['system'] - try: - try: - self.execute('INSERT CWConstraint X: X cstrtype CT, DEF constrained_by X ' - 'WHERE CT name "UniqueConstraint", DEF relation_type RT, DEF from_entity E,' - 'RT name "sujet", E name "Affaire"') - self.failIf(self.schema['Affaire'].has_unique_values('sujet')) - self.failIf(self.index_exists('Affaire', 'sujet', unique=True)) - self.commit() - self.failUnless(self.schema['Affaire'].has_unique_values('sujet')) - self.failUnless(self.index_exists('Affaire', 'sujet', unique=True)) - except: - import traceback - traceback.print_exc() - raise - finally: - self.execute('DELETE DEF constrained_by X WHERE X cstrtype CT, ' - 'CT name "UniqueConstraint", DEF relation_type RT, DEF from_entity E,' - 'RT name "sujet", E name "Affaire"') - self.failUnless(self.schema['Affaire'].has_unique_values('sujet')) - self.failUnless(self.index_exists('Affaire', 'sujet', unique=True)) - self.commit() - self.failIf(self.schema['Affaire'].has_unique_values('sujet')) - self.failIf(self.index_exists('Affaire', 'sujet', unique=True)) - - def test_required_change_1(self): - self.execute('SET DEF cardinality "?1" ' - 'WHERE DEF relation_type RT, DEF from_entity E,' - 'RT name "nom", E name "Personne"') - self.commit() - # should now be able to add personne without nom - self.execute('INSERT Personne X') - self.commit() - - def test_required_change_2(self): - self.execute('SET DEF cardinality "11" ' - 'WHERE DEF relation_type RT, DEF from_entity E,' - 'RT name "prenom", E name "Personne"') - self.commit() - # should not be able anymore to add personne without prenom - self.assertRaises(ValidationError, self.execute, 'INSERT Personne X: X nom "toto"') - self.execute('SET DEF cardinality "?1" ' - 'WHERE DEF relation_type RT, DEF from_entity E,' - 'RT name "prenom", E name "Personne"') - self.commit() - - - def test_add_attribute_to_base_class(self): - self.execute('INSERT CWAttribute X: X cardinality "11", X defaultval "noname", X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F ' - 'WHERE RT name "nom", E name "BaseTransition", F name "String"') - self.commit() - self.schema.rebuild_infered_relations() - self.failUnless('Transition' in self.schema['nom'].subjects()) - self.failUnless('WorkflowTransition' in self.schema['nom'].subjects()) - self.execute('Any X WHERE X is_instance_of BaseTransition, X nom "hop"') - -if __name__ == '__main__': - unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 
server/test/unittest_hooksmanager.py --- a/server/test/unittest_hooksmanager.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,179 +0,0 @@ -"""unit tests for the hooks manager -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" - -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.server.hooksmanager import HooksManager, Hook -from cubicweb.devtools import TestServerConfiguration -from cubicweb.devtools.apptest import RepositoryBasedTC - -class HookCalled(Exception): pass - -config = TestServerConfiguration('data') -config.bootstrap_cubes() -schema = config.load_schema() - -class HooksManagerTC(TestCase): - args = (None,) - kwargs = {'a': 1} - - def setUp(self): - """ called before each test from this class """ - self.o = HooksManager(schema) - - def test_register_hook_raise_keyerror(self): - self.assertRaises(AssertionError, - self.o.register_hook, self._hook, 'before_add_entiti') - self.assertRaises(AssertionError, - self.o.register_hook, self._hook, 'session_login', 'CWEType') - self.assertRaises(AssertionError, - self.o.register_hook, self._hook, 'session_logout', 'CWEType') - self.assertRaises(AssertionError, - self.o.register_hook, self._hook, 'server_startup', 'CWEType') - self.assertRaises(AssertionError, - self.o.register_hook, self._hook, 'server_shutdown', 'CWEType') - - def test_register_hook1(self): - self.o.register_hook(self._hook, 'before_add_entity') - self.o.register_hook(self._hook, 'before_delete_entity', 'Personne') - self._test_called_hooks() - - def test_register_hook2(self): - self.o.register_hook(self._hook, 'before_add_entity', '') - self.o.register_hook(self._hook, 'before_delete_entity', 'Personne') - self._test_called_hooks() - - def test_register_hook3(self): - self.o.register_hook(self._hook, 'before_add_entity', None) - self.o.register_hook(self._hook, 'before_delete_entity', 'Personne') - self._test_called_hooks() - - def test_register_hooks(self): - self.o.register_hooks({'before_add_entity' : {'': [self._hook]}, - 'before_delete_entity' : {'Personne': [self._hook]}, - }) - self._test_called_hooks() - - def test_unregister_hook(self): - self.o.register_hook(self._hook, 'after_delete_entity', 'Personne') - self.assertRaises(HookCalled, - self.o.call_hooks, 'after_delete_entity', 'Personne', - *self.args, **self.kwargs) - self.o.unregister_hook(self._hook, 'after_delete_entity', 'Personne') - # no hook should be called there - self.o.call_hooks('after_delete_entity', 'Personne') - - - def _test_called_hooks(self): - self.assertRaises(HookCalled, - self.o.call_hooks, 'before_add_entity', '', - *self.args, **self.kwargs) - self.assertRaises(HookCalled, - self.o.call_hooks, 'before_add_entity', None, - *self.args, **self.kwargs) - self.assertRaises(HookCalled, - self.o.call_hooks, 'before_add_entity', 'Personne', - *self.args, **self.kwargs) - self.assertRaises(HookCalled, - self.o.call_hooks, 'before_delete_entity', 'Personne', - *self.args, **self.kwargs) - # no hook should be called there - self.o.call_hooks('before_delete_entity', None) - self.o.call_hooks('before_delete_entity', 'Societe') - - - def _hook(self, *args, **kwargs): - # check arguments - self.assertEqual(args, self.args) - self.assertEqual(kwargs, self.kwargs) - raise HookCalled() - - -class RelationHookTC(TestCase): - """testcase for relation hooks grouping""" - def setUp(self): - """ called before each test from this class """ - self.o = HooksManager(schema) - self.called = [] - - def 
test_before_add_relation(self): - """make sure before_xxx_relation hooks are called directly""" - self.o.register_hook(self._before_relation_hook, - 'before_add_relation', 'concerne') - self.assertEquals(self.called, []) - self.o.call_hooks('before_add_relation', 'concerne', 'USER', - 1, 'concerne', 2) - self.assertEquals(self.called, [(1, 'concerne', 2)]) - - def test_after_add_relation(self): - """make sure after_xxx_relation hooks are deferred""" - self.o.register_hook(self._after_relation_hook, - 'after_add_relation', 'concerne') - self.assertEquals(self.called, []) - self.o.call_hooks('after_add_relation', 'concerne', 'USER', - 1, 'concerne', 2) - self.o.call_hooks('after_add_relation', 'concerne', 'USER', - 3, 'concerne', 4) - self.assertEquals(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) - - def test_before_delete_relation(self): - """make sure before_xxx_relation hooks are called directly""" - self.o.register_hook(self._before_relation_hook, - 'before_delete_relation', 'concerne') - self.assertEquals(self.called, []) - self.o.call_hooks('before_delete_relation', 'concerne', 'USER', - 1, 'concerne', 2) - self.assertEquals(self.called, [(1, 'concerne', 2)]) - - def test_after_delete_relation(self): - """make sure after_xxx_relation hooks are deferred""" - self.o.register_hook(self._after_relation_hook, - 'after_delete_relation', 'concerne') - self.o.call_hooks('after_delete_relation', 'concerne', 'USER', - 1, 'concerne', 2) - self.o.call_hooks('after_delete_relation', 'concerne', 'USER', - 3, 'concerne', 4) - self.assertEquals(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) - - - def _before_relation_hook(self, pool, subject, r_type, object): - self.called.append((subject, r_type, object)) - - def _after_relation_hook(self, pool, subject, r_type, object): - self.called.append((subject, r_type, object)) - - -class SystemHooksTC(RepositoryBasedTC): - - def test_startup_shutdown(self): - import hooks # cubicweb/server/test/data/hooks.py - self.assertEquals(hooks.CALLED_EVENTS['server_startup'], True) - # don't actually call repository.shutdown ! 
- self.repo.hm.call_hooks('server_shutdown', repo=None) - self.assertEquals(hooks.CALLED_EVENTS['server_shutdown'], True) - - def test_session_open_close(self): - import hooks # cubicweb/server/test/data/hooks.py - cnx = self.login('anon') - self.assertEquals(hooks.CALLED_EVENTS['session_open'], 'anon') - cnx.close() - self.assertEquals(hooks.CALLED_EVENTS['session_close'], 'anon') - - -from itertools import repeat - -class MyHook(Hook): - schema = schema # set for actual hooks at registration time - events = ('whatever', 'another') - accepts = ('Societe', 'Division') - -class HookTC(RepositoryBasedTC): - def test_inheritance(self): - self.assertEquals(list(MyHook.register_to()), - zip(repeat('whatever'), ('Societe', 'Division', 'SubDivision')) - + zip(repeat('another'), ('Societe', 'Division', 'SubDivision'))) - - -if __name__ == '__main__': - unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_ldapuser.py --- a/server/test/unittest_ldapuser.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_ldapuser.py Wed Jan 20 10:13:45 2010 +0100 @@ -9,8 +9,8 @@ import socket from logilab.common.testlib import TestCase, unittest_main, mock_object -from cubicweb.devtools import init_test_database, TestServerConfiguration -from cubicweb.devtools.apptest import RepositoryBasedTC +from cubicweb.devtools import TestServerConfiguration +from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools.repotest import RQLGeneratorTC from cubicweb.server.sources.ldapuser import * @@ -23,7 +23,7 @@ ADIM = 'adimascio' -def nopwd_authenticate(self, session, login, upassword): +def nopwd_authenticate(self, session, login, password): """used to monkey patch the source to get successful authentication without upassword checking """ @@ -44,36 +44,31 @@ -config = TestServerConfiguration('data') -config.sources_file = lambda : 'data/sourcesldap' -repo, cnx = init_test_database('sqlite', config=config) - -class LDAPUserSourceTC(RepositoryBasedTC): - repo, cnx = repo, cnx +class LDAPUserSourceTC(CubicWebTC): + config = TestServerConfiguration('data') + config.sources_file = lambda : 'data/sourcesldap' def patch_authenticate(self): self._orig_authenticate = LDAPUserSource.authenticate LDAPUserSource.authenticate = nopwd_authenticate - def setUp(self): - self._prepare() + def setup_database(self): # XXX: need this first query else we get 'database is locked' from # sqlite since it doesn't support multiple connections on the same # database # so doing, ldap inserted users don't get removed between each test - rset = self.execute('CWUser X') - self.commit() + rset = self.sexecute('CWUser X') # check we get some users from ldap self.assert_(len(rset) > 1) - self.maxeid = self.execute('Any MAX(X)')[0][0] def tearDown(self): if hasattr(self, '_orig_authenticate'): LDAPUserSource.authenticate = self._orig_authenticate - RepositoryBasedTC.tearDown(self) + CubicWebTC.tearDown(self) def test_authenticate(self): source = self.repo.sources_by_uri['ldapuser'] + self.session.set_pool() self.assertRaises(AuthenticationError, source.authenticate, self.session, 'toto', 'toto') @@ -83,7 +78,7 @@ def test_base(self): # check a known one - e = self.execute('CWUser X WHERE X login %(login)s', {'login': SYT}).get_entity(0, 0) + e = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT}).get_entity(0, 0) self.assertEquals(e.login, SYT) e.complete() self.assertEquals(e.creation_date, None) @@ -95,79 +90,79 @@ self.assertEquals(e.created_by, ()) self.assertEquals(e.primary_email[0].address, 
'Sylvain Thenault') # email content should be indexed on the user - rset = self.execute('CWUser X WHERE X has_text "thenault"') + rset = self.sexecute('CWUser X WHERE X has_text "thenault"') self.assertEquals(rset.rows, [[e.eid]]) def test_not(self): - eid = self.execute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] - rset = self.execute('CWUser X WHERE NOT X eid %s' % eid) + eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] + rset = self.sexecute('CWUser X WHERE NOT X eid %s' % eid) self.assert_(rset) self.assert_(not eid in (r[0] for r in rset)) def test_multiple(self): - seid = self.execute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] - aeid = self.execute('CWUser X WHERE X login %(login)s', {'login': ADIM})[0][0] - rset = self.execute('CWUser X, Y WHERE X login %(syt)s, Y login %(adim)s', + seid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] + aeid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': ADIM})[0][0] + rset = self.sexecute('CWUser X, Y WHERE X login %(syt)s, Y login %(adim)s', {'syt': SYT, 'adim': ADIM}) self.assertEquals(rset.rows, [[seid, aeid]]) - rset = self.execute('Any X,Y,L WHERE X login L, X login %(syt)s, Y login %(adim)s', + rset = self.sexecute('Any X,Y,L WHERE X login L, X login %(syt)s, Y login %(adim)s', {'syt': SYT, 'adim': ADIM}) self.assertEquals(rset.rows, [[seid, aeid, SYT]]) def test_in(self): - seid = self.execute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] - aeid = self.execute('CWUser X WHERE X login %(login)s', {'login': ADIM})[0][0] - rset = self.execute('Any X,L ORDERBY L WHERE X login IN("%s", "%s"), X login L' % (SYT, ADIM)) + seid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] + aeid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': ADIM})[0][0] + rset = self.sexecute('Any X,L ORDERBY L WHERE X login IN("%s", "%s"), X login L' % (SYT, ADIM)) self.assertEquals(rset.rows, [[aeid, ADIM], [seid, SYT]]) def test_relations(self): - eid = self.execute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] - rset = self.execute('Any X,E WHERE X is CWUser, X login L, X primary_email E') + eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] + rset = self.sexecute('Any X,E WHERE X is CWUser, X login L, X primary_email E') self.assert_(eid in (r[0] for r in rset)) - rset = self.execute('Any X,L,E WHERE X is CWUser, X login L, X primary_email E') + rset = self.sexecute('Any X,L,E WHERE X is CWUser, X login L, X primary_email E') self.assert_(SYT in (r[1] for r in rset)) def test_count(self): - nbusers = self.execute('Any COUNT(X) WHERE X is CWUser')[0][0] + nbusers = self.sexecute('Any COUNT(X) WHERE X is CWUser')[0][0] # just check this is a possible number self.assert_(nbusers > 1, nbusers) self.assert_(nbusers < 30, nbusers) def test_upper(self): - eid = self.execute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] - rset = self.execute('Any UPPER(L) WHERE X eid %s, X login L' % eid) + eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] + rset = self.sexecute('Any UPPER(L) WHERE X eid %s, X login L' % eid) self.assertEquals(rset[0][0], SYT.upper()) def test_unknown_attr(self): - eid = self.execute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] - rset = self.execute('Any L,C,M WHERE X eid %s, X login L, ' + eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] + rset = self.sexecute('Any L,C,M WHERE X eid %s, X login 
L, ' 'X creation_date C, X modification_date M' % eid) self.assertEquals(rset[0][0], SYT) self.assertEquals(rset[0][1], None) self.assertEquals(rset[0][2], None) def test_sort(self): - logins = [l for l, in self.execute('Any L ORDERBY L WHERE X login L')] + logins = [l for l, in self.sexecute('Any L ORDERBY L WHERE X login L')] self.assertEquals(logins, sorted(logins)) def test_lower_sort(self): - logins = [l for l, in self.execute('Any L ORDERBY lower(L) WHERE X login L')] + logins = [l for l, in self.sexecute('Any L ORDERBY lower(L) WHERE X login L')] self.assertEquals(logins, sorted(logins)) def test_or(self): - rset = self.execute('DISTINCT Any X WHERE X login %(login)s OR (X in_group G, G name "managers")', + rset = self.sexecute('DISTINCT Any X WHERE X login %(login)s OR (X in_group G, G name "managers")', {'login': SYT}) self.assertEquals(len(rset), 2, rset.rows) # syt + admin def test_nonregr_set_owned_by(self): # test that when a user coming from ldap is triggering a transition # the related TrInfo has correct owner information - self.execute('SET X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': SYT}) + self.sexecute('SET X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': SYT}) self.commit() - syt = self.execute('CWUser X WHERE X login %(login)s', {'login': SYT}).get_entity(0, 0) + syt = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT}).get_entity(0, 0) self.assertEquals([g.name for g in syt.in_group], ['managers', 'users']) self.patch_authenticate() - cnx = self.login(SYT, 'dummypassword') + cnx = self.login(SYT, password='dummypassword') cu = cnx.cursor() adim = cu.execute('CWUser X WHERE X login %(login)s', {'login': ADIM}).get_entity(0, 0) adim.fire_transition('deactivate') @@ -178,7 +173,7 @@ trinfo = adim.latest_trinfo() self.assertEquals(trinfo.owned_by[0].login, SYT) # select from_state to skip the user's creation TrInfo - rset = self.execute('Any U ORDERBY D DESC WHERE WF wf_info_for X,' + rset = self.sexecute('Any U ORDERBY D DESC WHERE WF wf_info_for X,' 'WF creation_date D, WF from_state FS,' 'WF owned_by U?, X eid %(x)s', {'x': adim.eid}, 'x') @@ -186,77 +181,78 @@ finally: # restore db state self.restore_connection() - adim = self.execute('CWUser X WHERE X login %(login)s', {'login': ADIM}).get_entity(0, 0) + adim = self.sexecute('CWUser X WHERE X login %(login)s', {'login': ADIM}).get_entity(0, 0) adim.fire_transition('activate') - self.execute('DELETE X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': SYT}) + self.sexecute('DELETE X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': SYT}) def test_same_column_names(self): - self.execute('Any X, Y WHERE X copain Y, X login "comme", Y login "cochon"') + self.sexecute('Any X, Y WHERE X copain Y, X login "comme", Y login "cochon"') def test_multiple_entities_from_different_sources(self): - self.create_user('cochon') - self.failUnless(self.execute('Any X,Y WHERE X login %(syt)s, Y login "cochon"', {'syt': SYT})) + self.create_user('cochon', req=self.session) + self.failUnless(self.sexecute('Any X,Y WHERE X login %(syt)s, Y login "cochon"', {'syt': SYT})) def test_exists1(self): - self.add_entity('CWGroup', name=u'bougloup1') - self.add_entity('CWGroup', name=u'bougloup2') - self.execute('SET U in_group G WHERE G name ~= "bougloup%", U login "admin"') - self.execute('SET U in_group G WHERE G name = "bougloup1", U login %(syt)s', {'syt': SYT}) - rset = self.execute('Any L,SN ORDERBY L WHERE X in_state S, S name SN, X login L, EXISTS(X in_group G, G name 
~= "bougloup%")') + self.session.set_pool() + self.session.create_entity('CWGroup', name=u'bougloup1') + self.session.create_entity('CWGroup', name=u'bougloup2') + self.sexecute('SET U in_group G WHERE G name ~= "bougloup%", U login "admin"') + self.sexecute('SET U in_group G WHERE G name = "bougloup1", U login %(syt)s', {'syt': SYT}) + rset = self.sexecute('Any L,SN ORDERBY L WHERE X in_state S, S name SN, X login L, EXISTS(X in_group G, G name ~= "bougloup%")') self.assertEquals(rset.rows, [['admin', 'activated'], [SYT, 'activated']]) def test_exists2(self): - self.create_user('comme') - self.create_user('cochon') - self.execute('SET X copain Y WHERE X login "comme", Y login "cochon"') - rset = self.execute('Any GN ORDERBY GN WHERE X in_group G, G name GN, (G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon")))') + self.create_user('comme', req=self.session) + self.create_user('cochon', req=self.session) + self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"') + rset = self.sexecute('Any GN ORDERBY GN WHERE X in_group G, G name GN, (G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon")))') self.assertEquals(rset.rows, [['managers'], ['users']]) def test_exists3(self): - self.create_user('comme') - self.create_user('cochon') - self.execute('SET X copain Y WHERE X login "comme", Y login "cochon"') - self.failUnless(self.execute('Any X, Y WHERE X copain Y, X login "comme", Y login "cochon"')) - self.execute('SET X copain Y WHERE X login %(syt)s, Y login "cochon"', {'syt': SYT}) - self.failUnless(self.execute('Any X, Y WHERE X copain Y, X login %(syt)s, Y login "cochon"', {'syt': SYT})) - rset = self.execute('Any GN,L WHERE X in_group G, X login L, G name GN, G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon"))') + self.create_user('comme', req=self.session) + self.create_user('cochon', req=self.session) + self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"') + self.failUnless(self.sexecute('Any X, Y WHERE X copain Y, X login "comme", Y login "cochon"')) + self.sexecute('SET X copain Y WHERE X login %(syt)s, Y login "cochon"', {'syt': SYT}) + self.failUnless(self.sexecute('Any X, Y WHERE X copain Y, X login %(syt)s, Y login "cochon"', {'syt': SYT})) + rset = self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon"))') self.assertEquals(sorted(rset.rows), [['managers', 'admin'], ['users', 'comme'], ['users', SYT]]) def test_exists4(self): - self.create_user('comme') - self.create_user('cochon', groups=('users', 'guests')) - self.create_user('billy') - self.execute('SET X copain Y WHERE X login "comme", Y login "cochon"') - self.execute('SET X copain Y WHERE X login "cochon", Y login "cochon"') - self.execute('SET X copain Y WHERE X login "comme", Y login "billy"') - self.execute('SET X copain Y WHERE X login %(syt)s, Y login "billy"', {'syt': SYT}) + self.create_user('comme', req=self.session) + self.create_user('cochon', groups=('users', 'guests'), req=self.session) + self.create_user('billy', req=self.session) + self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"') + self.sexecute('SET X copain Y WHERE X login "cochon", Y login "cochon"') + self.sexecute('SET X copain Y WHERE X login "comme", Y login "billy"') + self.sexecute('SET X copain Y WHERE X login %(syt)s, Y login "billy"', {'syt': SYT}) # search for group name, login where # CWUser copain with "comme" or "cochon" AND same login as the copain # OR # 
CWUser in_state activated AND not copain with billy # # SO we expect everybody but "comme" and "syt" - rset= self.execute('Any GN,L WHERE X in_group G, X login L, G name GN, ' + rset= self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, ' 'EXISTS(X copain T, T login L, T login in ("comme", "cochon")) OR ' 'EXISTS(X in_state S, S name "activated", NOT X copain T2, T2 login "billy")') - all = self.execute('Any GN, L WHERE X in_group G, X login L, G name GN') + all = self.sexecute('Any GN, L WHERE X in_group G, X login L, G name GN') all.rows.remove(['users', 'comme']) all.rows.remove(['users', SYT]) self.assertEquals(sorted(rset.rows), sorted(all.rows)) def test_exists5(self): - self.create_user('comme') - self.create_user('cochon', groups=('users', 'guests')) - self.create_user('billy') - self.execute('SET X copain Y WHERE X login "comme", Y login "cochon"') - self.execute('SET X copain Y WHERE X login "cochon", Y login "cochon"') - self.execute('SET X copain Y WHERE X login "comme", Y login "billy"') - self.execute('SET X copain Y WHERE X login %(syt)s, Y login "cochon"', {'syt': SYT}) - rset= self.execute('Any L WHERE X login L, ' + self.create_user('comme', req=self.session) + self.create_user('cochon', groups=('users', 'guests'), req=self.session) + self.create_user('billy', req=self.session) + self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"') + self.sexecute('SET X copain Y WHERE X login "cochon", Y login "cochon"') + self.sexecute('SET X copain Y WHERE X login "comme", Y login "billy"') + self.sexecute('SET X copain Y WHERE X login %(syt)s, Y login "cochon"', {'syt': SYT}) + rset= self.sexecute('Any L WHERE X login L, ' 'EXISTS(X copain T, T login in ("comme", "cochon")) AND ' 'NOT EXISTS(X copain T2, T2 login "billy")') self.assertEquals(sorted(rset.rows), [['cochon'], [SYT]]) - rset= self.execute('Any GN,L WHERE X in_group G, X login L, G name GN, ' + rset= self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, ' 'EXISTS(X copain T, T login in ("comme", "cochon")) AND ' 'NOT EXISTS(X copain T2, T2 login "billy")') self.assertEquals(sorted(rset.rows), [['guests', 'cochon'], @@ -264,18 +260,20 @@ ['users', SYT]]) def test_cd_restriction(self): - rset = self.execute('CWUser X WHERE X creation_date > "2009-02-01"') - self.assertEquals(len(rset), 2) # admin/anon but no ldap user since it doesn't support creation_date + rset = self.sexecute('CWUser X WHERE X creation_date > "2009-02-01"') + # admin/anon but no ldap user since it doesn't support creation_date + self.assertEquals(sorted(e.login for e in rset.entities()), + ['admin', 'anon']) def test_union(self): - afeids = self.execute('State X') - ueids = self.execute('CWUser X') - rset = self.execute('(Any X WHERE X is State) UNION (Any X WHERE X is CWUser)') + afeids = self.sexecute('State X') + ueids = self.sexecute('CWUser X') + rset = self.sexecute('(Any X WHERE X is State) UNION (Any X WHERE X is CWUser)') self.assertEquals(sorted(r[0] for r in rset.rows), sorted(r[0] for r in afeids + ueids)) def _init_security_test(self): - self.create_user('iaminguestsgrouponly', groups=('guests',)) + self.create_user('iaminguestsgrouponly', groups=('guests',), req=self.session) cnx = self.login('iaminguestsgrouponly') return cnx.cursor() @@ -301,33 +299,33 @@ self.assertEquals(rset.rows, [[None]]) def test_nonregr1(self): - self.execute('Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, E owned_by X, ' + self.sexecute('Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, E owned_by X, ' 'X modification_date AA', - 
{'x': cnx.user(self.session).eid}) + {'x': self.session.user.eid}) def test_nonregr2(self): - self.execute('Any X,L,AA WHERE E eid %(x)s, E owned_by X, ' + self.sexecute('Any X,L,AA WHERE E eid %(x)s, E owned_by X, ' 'X login L, X modification_date AA', - {'x': cnx.user(self.session).eid}) + {'x': self.session.user.eid}) def test_nonregr3(self): - self.execute('Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, ' + self.sexecute('Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, ' 'X modification_date AA', - {'x': cnx.user(self.session).eid}) + {'x': self.session.user.eid}) def test_nonregr4(self): - emaileid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org"')[0][0] - self.execute('Any X,AA WHERE X use_email Y, Y eid %(x)s, X modification_date AA', + emaileid = self.sexecute('INSERT EmailAddress X: X address "toto@logilab.org"')[0][0] + self.sexecute('Any X,AA WHERE X use_email Y, Y eid %(x)s, X modification_date AA', {'x': emaileid}) def test_nonregr5(self): # original jpl query: # Any X, NOW - CD, P WHERE P is Project, U interested_in P, U is CWUser, U login "sthenault", X concerns P, X creation_date CD ORDERBY CD DESC LIMIT 5 rql = 'Any X, NOW - CD, P ORDERBY CD DESC LIMIT 5 WHERE P bookmarked_by U, U login "%s", P is X, X creation_date CD' % self.session.user.login - self.execute(rql, )#{'x': }) + self.sexecute(rql, )#{'x': }) def test_nonregr6(self): - self.execute('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File ' + self.sexecute('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File ' 'WITH U,UL BEING (Any U,UL WHERE ME eid %(x)s, (EXISTS(U identity ME) ' 'OR (EXISTS(U in_group G, G name IN("managers", "staff")))) ' 'OR (EXISTS(U in_group H, ME in_group H, NOT H name "users")), U login UL, U is CWUser)', @@ -368,6 +366,9 @@ res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]]) self.assertEquals(res, [[1, 5], [2, 4], [3, 6]]) +# XXX +LDAPUserSourceTC._init_repo() +repo = LDAPUserSourceTC.repo class RQL2LDAPFilterTC(RQLGeneratorTC): schema = repo.schema diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_migractions.py --- a/server/test/unittest_migractions.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_migractions.py Wed Jan 20 10:13:45 2010 +0100 @@ -2,13 +2,14 @@ :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ +from copy import deepcopy from datetime import date from os.path import join from logilab.common.testlib import TestCase, unittest_main from cubicweb import ConfigurationError -from cubicweb.devtools.apptest import RepositoryBasedTC, get_versions +from cubicweb.devtools.testlib import CubicWebTC, get_versions from cubicweb.schema import CubicWebSchemaLoader from cubicweb.server.sqlutils import SQL_PREFIX from cubicweb.server.repository import Repository @@ -23,22 +24,28 @@ Repository.get_versions = orig_get_versions -class MigrationCommandsTC(RepositoryBasedTC): +class MigrationCommandsTC(CubicWebTC): + + @classmethod + def init_config(cls, config): + super(MigrationCommandsTC, cls).init_config(config) + config._cubes = None + cls.repo.fill_schema() + cls.origschema = deepcopy(cls.repo.schema) + # hack to read the schema from data/migrschema + config.appid = join('data', 'migratedapp') + global migrschema + migrschema = config.load_schema() + config.appid = 'data' + assert 'Folder' in migrschema + + @classmethod + def _refresh_repo(cls): + super(MigrationCommandsTC, cls)._refresh_repo() + cls.repo.schema = cls.vreg.schema = deepcopy(cls.origschema) def setUp(self): - if not hasattr(self, '_repo'): - 
# first initialization - repo = self.repo # set by the RepositoryBasedTC metaclass - # force to read schema from the database - repo.config._cubes = None - repo.fill_schema() - # hack to read the schema from data/migrschema - self.repo.config.appid = join('data', 'migratedapp') - global migrschema - migrschema = self.repo.config.load_schema() - self.repo.config.appid = 'data' - assert 'Folder' in migrschema - RepositoryBasedTC.setUp(self) + CubicWebTC.setUp(self) self.mh = ServerMigrationHelper(self.repo.config, migrschema, repo=self.repo, cnx=self.cnx, interactive=False) @@ -48,7 +55,7 @@ def test_add_attribute_int(self): self.failIf('whatever' in self.schema) - self.add_entity('Note') + self.request().create_entity('Note') self.commit() orderdict = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) @@ -61,7 +68,7 @@ self.assertEquals(note.whatever, 2) orderdict2 = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) - whateverorder = migrschema['whatever'].rproperty('Note', 'Int', 'order') + whateverorder = migrschema['whatever'].rdef('Note', 'Int').order for k, v in orderdict.iteritems(): if v >= whateverorder: orderdict[k] = v+1 @@ -187,7 +194,7 @@ ('Personne',)) self.assertEquals(self.schema['concerne2'].objects(), ('Affaire', )) - self.assertEquals(self.schema['concerne2'].rproperty('Personne', 'Affaire', 'cardinality'), + self.assertEquals(self.schema['concerne2'].rdef('Personne', 'Affaire').cardinality, '1*') self.mh.cmd_add_relation_definition('Personne', 'concerne2', 'Note') self.assertEquals(sorted(self.schema['concerne2'].objects()), ['Affaire', 'Note']) @@ -247,12 +254,12 @@ def test_change_relation_props_non_final(self): rschema = self.schema['concerne'] - card = rschema.rproperty('Affaire', 'Societe', 'cardinality') + card = rschema.rdef('Affaire', 'Societe').cardinality self.assertEquals(card, '**') try: self.mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', cardinality='?*') - card = rschema.rproperty('Affaire', 'Societe', 'cardinality') + card = rschema.rdef('Affaire', 'Societe').cardinality self.assertEquals(card, '?*') finally: self.mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', @@ -260,12 +267,12 @@ def test_change_relation_props_final(self): rschema = self.schema['adel'] - card = rschema.rproperty('Personne', 'String', 'fulltextindexed') + card = rschema.rdef('Personne', 'String').fulltextindexed self.assertEquals(card, False) try: self.mh.cmd_change_relation_props('Personne', 'adel', 'String', fulltextindexed=True) - card = rschema.rproperty('Personne', 'String', 'fulltextindexed') + card = rschema.rdef('Personne', 'String').fulltextindexed self.assertEquals(card, True) finally: self.mh.cmd_change_relation_props('Personne', 'adel', 'String', @@ -273,13 +280,16 @@ def test_sync_schema_props_perms(self): cursor = self.mh.session + cursor.set_pool() nbrqlexpr_start = len(cursor.execute('RQLExpression X')) - migrschema['titre']._rproperties[('Personne', 'String')]['order'] = 7 - migrschema['adel']._rproperties[('Personne', 'String')]['order'] = 6 - migrschema['ass']._rproperties[('Personne', 'String')]['order'] = 5 + migrschema['titre'].rdefs[('Personne', 'String')].order = 7 + migrschema['adel'].rdefs[('Personne', 'String')].order = 6 + migrschema['ass'].rdefs[('Personne', 'String')].order = 5 migrschema['Personne'].description = 'blabla bla' migrschema['titre'].description = 'usually 
a title' - migrschema['titre']._rproperties[('Personne', 'String')]['description'] = 'title for this person' + migrschema['titre'].rdefs[('Personne', 'String')].description = 'title for this person' + delete_concerne_rqlexpr = self._rrqlexpr_rset('delete', 'concerne') + add_concerne_rqlexpr = self._rrqlexpr_rset('add', 'concerne') self.mh.cmd_sync_schema_props_perms(commit=False) self.assertEquals(cursor.execute('Any D WHERE X name "Personne", X description D')[0][0], @@ -317,7 +327,7 @@ self.assertEquals(rexpr.expression, 'O require_permission P, P name "add_note", ' 'U in_group G, P require_group G') - self.assertEquals([rt.name for rt in rexpr.reverse_add_permission], ['ecrit_par']) + self.assertEquals([rdef.rtype.name for rdef in rexpr.reverse_add_permission], ['ecrit_par']) self.assertEquals(rexpr.reverse_read_permission, ()) self.assertEquals(rexpr.reverse_delete_permission, ()) # no more rqlexpr to delete and add travaille relation @@ -335,8 +345,10 @@ self.assertEquals(len(self._erqlexpr_rset('delete', 'Affaire')), 1) self.assertEquals(len(self._erqlexpr_rset('add', 'Affaire')), 1) # no change for rqlexpr to add and delete concerne relation - self.assertEquals(len(self._rrqlexpr_rset('delete', 'concerne')), 1) - self.assertEquals(len(self._rrqlexpr_rset('add', 'concerne')), 1) + for rdef in self.schema['concerne'].rdefs.values(): + print rdef, rdef.permissions + self.assertEquals(len(self._rrqlexpr_rset('delete', 'concerne')), len(delete_concerne_rqlexpr)) + self.assertEquals(len(self._rrqlexpr_rset('add', 'concerne')), len(add_concerne_rqlexpr)) # * migrschema involve: # * 8 deletion (2 in Affaire read + Societe + travaille + para rqlexprs) # * 1 update (Affaire update) @@ -357,7 +369,7 @@ self.assertEquals(len(rset), 1) return rset.get_entity(0, 0) def _rrqlexpr_rset(self, action, ertype): - rql = 'RQLExpression X WHERE ET is CWRType, ET %s_permission X, ET name %%(name)s' % action + rql = 'RQLExpression X WHERE RT is CWRType, RDEF %s_permission X, RT name %%(name)s, RDEF relation_type RT' % action return self.mh.session.execute(rql, {'name': ertype}) def _rrqlexpr_entity(self, action, ertype): rset = self._rrqlexpr_rset(action, ertype) @@ -379,7 +391,7 @@ def test_add_remove_cube_and_deps(self): cubes = set(self.config.cubes()) schema = self.repo.schema - self.assertEquals(sorted((str(s), str(o)) for s, o in schema['see_also']._rproperties.keys()), + self.assertEquals(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs.keys()), sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'), ('Note', 'Note'), ('Note', 'Bookmark')])) @@ -394,7 +406,7 @@ for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', 'sender', 'in_thread', 'reply_to', 'data_format'): self.failIf(ertype in schema, ertype) - self.assertEquals(sorted(schema['see_also']._rproperties.keys()), + self.assertEquals(sorted(schema['see_also'].rdefs.keys()), sorted([('Folder', 'Folder'), ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'), @@ -417,7 +429,7 @@ for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', 'sender', 'in_thread', 'reply_to', 'data_format'): self.failUnless(ertype in schema, ertype) - self.assertEquals(sorted(schema['see_also']._rproperties.keys()), + self.assertEquals(sorted(schema['see_also'].rdefs.keys()), sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'), diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_msplanner.py --- 
a/server/test/unittest_msplanner.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_msplanner.py Wed Jan 20 10:13:45 2010 +0100 @@ -59,7 +59,7 @@ # keep cnx so it's not garbage collected and the associated session is closed -repo, cnx = init_test_database('sqlite') +repo, cnx = init_test_database() class BaseMSPlannerTC(BasePlannerTC): """test planner related feature on a 3-sources repository: @@ -74,16 +74,16 @@ #_QuerierTC.setUp(self) self.setup() # hijack Affaire security - affreadperms = list(self.schema['Affaire']._groups['read']) + affreadperms = list(self.schema['Affaire'].permissions['read']) self.prevrqlexpr_affaire = affreadperms[-1] # add access to type attribute so S can't be invariant affreadperms[-1] = ERQLExpression('X concerne S?, S owned_by U, S type "X"') - self.schema['Affaire']._groups['read'] = tuple(affreadperms) + self.schema['Affaire'].permissions['read'] = tuple(affreadperms) # hijack CWUser security - userreadperms = list(self.schema['CWUser']._groups['read']) + userreadperms = list(self.schema['CWUser'].permissions['read']) self.prevrqlexpr_user = userreadperms[-1] userreadperms[-1] = ERQLExpression('X owned_by U') - self.schema['CWUser']._groups['read'] = tuple(userreadperms) + self.schema['CWUser'].permissions['read'] = tuple(userreadperms) self.add_source(FakeUserROSource, 'ldap') self.add_source(FakeCardSource, 'cards') @@ -91,19 +91,24 @@ super(BaseMSPlannerTC, self).tearDown() # restore hijacked security self.restore_orig_affaire_security() - self.restore_orig_euser_security() + self.restore_orig_cwuser_security() def restore_orig_affaire_security(self): - affreadperms = list(self.schema['Affaire']._groups['read']) + affreadperms = list(self.schema['Affaire'].permissions['read']) affreadperms[-1] = self.prevrqlexpr_affaire - self.schema['Affaire']._groups['read'] = tuple(affreadperms) - clear_cache(self.schema['Affaire'], 'ERSchema_get_rqlexprs') + self.schema['Affaire'].permissions['read'] = tuple(affreadperms) + clear_cache(self.schema['Affaire'], 'get_rqlexprs') + #clear_cache(self.schema['Affaire'], 'get_groups') - def restore_orig_euser_security(self): - userreadperms = list(self.schema['CWUser']._groups['read']) + def restore_orig_cwuser_security(self): + if hasattr(self, '_orig_cwuser_security_restored'): + return + self._orig_cwuser_security_restored = True + userreadperms = list(self.schema['CWUser'].permissions['read']) userreadperms[-1] = self.prevrqlexpr_user - self.schema['CWUser']._groups['read'] = tuple(userreadperms) - clear_cache(self.schema['CWUser'], 'ERSchema_get_rqlexprs') + self.schema['CWUser'].permissions['read'] = tuple(userreadperms) + clear_cache(self.schema['CWUser'], 'get_rqlexprs') + #clear_cache(self.schema['CWUser'], 'get_groups') class PartPlanInformationTC(BaseMSPlannerTC): @@ -989,9 +994,10 @@ ]) def test_security_3sources_identity(self): - self.restore_orig_euser_security() + self.restore_orig_cwuser_security() # use a guest user self.session = self._user_session()[1] + print self.session self._test('Any X, XT WHERE X is Card, X owned_by U, X title XT, U login "syt"', [('FetchStep', [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], @@ -1003,7 +1009,7 @@ ]) def test_security_3sources_identity_optional_var(self): - self.restore_orig_euser_security() + self.restore_orig_cwuser_security() # use a guest user self.session = self._user_session()[1] self._test('Any X,XT,U WHERE X is Card, X owned_by U?, X title XT, U login L', diff -r 15d541321a8c -r 74c1597f8a82 
server/test/unittest_multisources.py --- a/server/test/unittest_multisources.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_multisources.py Wed Jan 20 10:13:45 2010 +0100 @@ -11,7 +11,7 @@ from logilab.common.decorators import cached from cubicweb.devtools import TestServerConfiguration, init_test_database -from cubicweb.devtools.apptest import RepositoryBasedTC +from cubicweb.devtools.testlib import CubicWebTC, refresh_repo from cubicweb.devtools.repotest import do_monkey_patch, undo_monkey_patch TestServerConfiguration.no_sqlite_wrap = True @@ -26,16 +26,9 @@ class ExternalSource2Configuration(TestServerConfiguration): sourcefile = 'sources_multi2' -repo2, cnx2 = init_test_database('sqlite', config=ExternalSource1Configuration('data')) -cu = cnx2.cursor() -ec1 = cu.execute('INSERT Card X: X title "C3: An external card", X wikiid "aaa"')[0][0] -cu.execute('INSERT Card X: X title "C4: Ze external card", X wikiid "zzz"') -aff1 = cu.execute('INSERT Affaire X: X ref "AFFREF"')[0][0] -cnx2.commit() - MTIME = datetime.now() - timedelta(0, 10) - -repo3, cnx3 = init_test_database('sqlite', config=ExternalSource2Configuration('data')) +repo2, cnx2 = init_test_database(config=ExternalSource1Configuration('data')) +repo3, cnx3 = init_test_database(config=ExternalSource2Configuration('data')) # XXX, access existing connection, no pyro connection from cubicweb.server.sources.pyrorql import PyroRQLSource @@ -45,38 +38,47 @@ from cubicweb.dbapi import Connection Connection.close = lambda x: None -class TwoSourcesTC(RepositoryBasedTC): - repo_config = TwoSourcesConfiguration('data') +class TwoSourcesTC(CubicWebTC): + config = TwoSourcesConfiguration('data') + + @classmethod + def _refresh_repo(cls): + super(TwoSourcesTC, cls)._refresh_repo() + cnx2.rollback() + refresh_repo(repo2) + cnx3.rollback() + refresh_repo(repo3) def setUp(self): - RepositoryBasedTC.setUp(self) - self.repo.sources[-1]._query_cache.clear() - self.repo.sources[-2]._query_cache.clear() - # trigger discovery - self.execute('Card X') - self.execute('Affaire X') - self.execute('State X') - self.commit() - # don't delete external entities! 
- self.maxeid = self.session.system_sql('SELECT MAX(eid) FROM entities').fetchone()[0] - # add some entities - self.ic1 = self.execute('INSERT Card X: X title "C1: An internal card", X wikiid "aaai"')[0][0] - self.ic2 = self.execute('INSERT Card X: X title "C2: Ze internal card", X wikiid "zzzi"')[0][0] - self.commit() + CubicWebTC.setUp(self) do_monkey_patch() def tearDown(self): - RepositoryBasedTC.tearDown(self) + CubicWebTC.tearDown(self) undo_monkey_patch() + def setup_database(self): + cu = cnx2.cursor() + self.ec1 = cu.execute('INSERT Card X: X title "C3: An external card", X wikiid "aaa"')[0][0] + cu.execute('INSERT Card X: X title "C4: Ze external card", X wikiid "zzz"') + self.aff1 = cu.execute('INSERT Affaire X: X ref "AFFREF"')[0][0] + cnx2.commit() + # trigger discovery + self.sexecute('Card X') + self.sexecute('Affaire X') + self.sexecute('State X') + # add some entities + self.ic1 = self.sexecute('INSERT Card X: X title "C1: An internal card", X wikiid "aaai"')[0][0] + self.ic2 = self.sexecute('INSERT Card X: X title "C2: Ze internal card", X wikiid "zzzi"')[0][0] + def test_eid_comp(self): - rset = self.execute('Card X WHERE X eid > 1') + rset = self.sexecute('Card X WHERE X eid > 1') self.assertEquals(len(rset), 4) - rset = self.execute('Any X,T WHERE X title T, X eid > 1') + rset = self.sexecute('Any X,T WHERE X title T, X eid > 1') self.assertEquals(len(rset), 4) def test_metainformation(self): - rset = self.execute('Card X ORDERBY T WHERE X title T') + rset = self.sexecute('Card X ORDERBY T WHERE X title T') # 2 added to the system source, 2 added to the external source self.assertEquals(len(rset), 4) # since they are orderd by eid, we know the 3 first one is coming from the system source @@ -89,28 +91,28 @@ self.assertEquals(metainf['source'], {'adapter': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern'}) self.assertEquals(metainf['type'], 'Card') self.assert_(metainf['extid']) - etype = self.execute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s', + etype = self.sexecute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s', {'x': externent.eid}, 'x')[0][0] self.assertEquals(etype, 'Card') def test_order_limit_offset(self): - rsetbase = self.execute('Any W,X ORDERBY W,X WHERE X wikiid W') + rsetbase = self.sexecute('Any W,X ORDERBY W,X WHERE X wikiid W') self.assertEquals(len(rsetbase), 4) self.assertEquals(sorted(rsetbase.rows), rsetbase.rows) - rset = self.execute('Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WHERE X wikiid W') + rset = self.sexecute('Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WHERE X wikiid W') self.assertEquals(rset.rows, rsetbase.rows[2:4]) def test_has_text(self): self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before - self.failUnless(self.execute('Any X WHERE X has_text "affref"')) - self.failUnless(self.execute('Affaire X WHERE X has_text "affref"')) + self.failUnless(self.sexecute('Any X WHERE X has_text "affref"')) + self.failUnless(self.sexecute('Affaire X WHERE X has_text "affref"')) def test_anon_has_text(self): self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before - self.execute('INSERT Affaire X: X ref "no readable card"')[0][0] - aff1 = self.execute('INSERT Affaire X: X ref "card"')[0][0] + self.sexecute('INSERT Affaire X: X ref "no readable card"')[0][0] + aff1 = self.sexecute('INSERT Affaire X: X ref "card"')[0][0] # grant read access - self.execute('SET X owned_by U WHERE X eid %(x)s, U login "anon"', {'x': aff1}, 'x') + self.sexecute('SET X owned_by U 
WHERE X eid %(x)s, U login "anon"', {'x': aff1}, 'x') self.commit() cnx = self.login('anon') cu = cnx.cursor() @@ -120,79 +122,81 @@ def test_synchronization(self): cu = cnx2.cursor() - assert cu.execute('Any X WHERE X eid %(x)s', {'x': aff1}, 'x') - cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': aff1}, 'x') + assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1}, 'x') + cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1}, 'x') aff2 = cu.execute('INSERT Affaire X: X ref "AFFREUX"')[0][0] cnx2.commit() try: # force sync self.repo.sources_by_uri['extern'].synchronize(MTIME) - self.failUnless(self.execute('Any X WHERE X has_text "blah"')) - self.failUnless(self.execute('Any X WHERE X has_text "affreux"')) + self.failUnless(self.sexecute('Any X WHERE X has_text "blah"')) + self.failUnless(self.sexecute('Any X WHERE X has_text "affreux"')) cu.execute('DELETE Affaire X WHERE X eid %(x)s', {'x': aff2}) cnx2.commit() self.repo.sources_by_uri['extern'].synchronize(MTIME) - rset = self.execute('Any X WHERE X has_text "affreux"') + rset = self.sexecute('Any X WHERE X has_text "affreux"') self.failIf(rset) finally: # restore state - cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': aff1}, 'x') + cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1}, 'x') cnx2.commit() def test_simplifiable_var(self): - affeid = self.execute('Affaire X WHERE X ref "AFFREF"')[0][0] - rset = self.execute('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB', + affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] + rset = self.sexecute('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB', {'x': affeid}, 'x') self.assertEquals(len(rset), 1) self.assertEquals(rset[0][1], "pitetre") def test_simplifiable_var_2(self): - affeid = self.execute('Affaire X WHERE X ref "AFFREF"')[0][0] - rset = self.execute('Any E WHERE E eid %(x)s, E in_state S, NOT S name "moved"', + affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] + rset = self.sexecute('Any E WHERE E eid %(x)s, E in_state S, NOT S name "moved"', {'x': affeid, 'u': self.session.user.eid}, 'x') self.assertEquals(len(rset), 1) def test_sort_func(self): - self.execute('Affaire X ORDERBY DUMB_SORT(RF) WHERE X ref RF') + self.sexecute('Affaire X ORDERBY DUMB_SORT(RF) WHERE X ref RF') def test_sort_func_ambigous(self): - self.execute('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF') + self.sexecute('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF') def test_in_eid(self): - iec1 = self.repo.extid2eid(self.repo.sources_by_uri['extern'], str(ec1), + iec1 = self.repo.extid2eid(self.repo.sources_by_uri['extern'], str(self.ec1), 'Card', self.session) - rset = self.execute('Any X WHERE X eid IN (%s, %s)' % (iec1, self.ic1)) + rset = self.sexecute('Any X WHERE X eid IN (%s, %s)' % (iec1, self.ic1)) self.assertEquals(sorted(r[0] for r in rset.rows), sorted([iec1, self.ic1])) def test_greater_eid(self): - rset = self.execute('Any X WHERE X eid > %s' % self.maxeid) + rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1)) self.assertEquals(len(rset.rows), 2) # self.ic1 and self.ic2 + cu = cnx2.cursor() ec2 = cu.execute('INSERT Card X: X title "glup"')[0][0] cnx2.commit() # 'X eid > something' should not trigger discovery - rset = self.execute('Any X WHERE X eid > %s' % self.maxeid) + rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1)) self.assertEquals(len(rset.rows), 2) # trigger discovery using another query - crset = self.execute('Card X WHERE X 
title "glup"') + crset = self.sexecute('Card X WHERE X title "glup"') self.assertEquals(len(crset.rows), 1) - rset = self.execute('Any X WHERE X eid > %s' % self.maxeid) + rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1)) self.assertEquals(len(rset.rows), 3) - rset = self.execute('Any MAX(X)') + rset = self.sexecute('Any MAX(X)') self.assertEquals(len(rset.rows), 1) self.assertEquals(rset.rows[0][0], crset[0][0]) def test_attr_unification_1(self): - n1 = self.execute('INSERT Note X: X type "AFFREF"')[0][0] - n2 = self.execute('INSERT Note X: X type "AFFREU"')[0][0] - rset = self.execute('Any X,Y WHERE X is Note, Y is Affaire, X type T, Y ref T') + n1 = self.sexecute('INSERT Note X: X type "AFFREF"')[0][0] + n2 = self.sexecute('INSERT Note X: X type "AFFREU"')[0][0] + rset = self.sexecute('Any X,Y WHERE X is Note, Y is Affaire, X type T, Y ref T') self.assertEquals(len(rset), 1, rset.rows) def test_attr_unification_2(self): + cu = cnx2.cursor() ec2 = cu.execute('INSERT Card X: X title "AFFREF"')[0][0] cnx2.commit() try: - c1 = self.execute('INSERT Card C: C title "AFFREF"')[0][0] - rset = self.execute('Any X,Y WHERE X is Card, Y is Affaire, X title T, Y ref T') + c1 = self.sexecute('INSERT Card C: C title "AFFREF"')[0][0] + rset = self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X title T, Y ref T') self.assertEquals(len(rset), 2, rset.rows) finally: cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2}, 'x') @@ -200,83 +204,88 @@ def test_attr_unification_neq_1(self): # XXX complete - self.execute('Any X,Y WHERE X is Note, Y is Affaire, X creation_date D, Y creation_date > D') + self.sexecute('Any X,Y WHERE X is Note, Y is Affaire, X creation_date D, Y creation_date > D') def test_attr_unification_neq_2(self): # XXX complete - self.execute('Any X,Y WHERE X is Card, Y is Affaire, X creation_date D, Y creation_date > D') + self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X creation_date D, Y creation_date > D') def test_union(self): - afeids = self.execute('Affaire X') - ueids = self.execute('CWUser X') - rset = self.execute('(Any X WHERE X is Affaire) UNION (Any X WHERE X is CWUser)') + afeids = self.sexecute('Affaire X') + ueids = self.sexecute('CWUser X') + rset = self.sexecute('(Any X WHERE X is Affaire) UNION (Any X WHERE X is CWUser)') self.assertEquals(sorted(r[0] for r in rset.rows), sorted(r[0] for r in afeids + ueids)) def test_subquery1(self): - rsetbase = self.execute('Any W,X WITH W,X BEING (Any W,X ORDERBY W,X WHERE X wikiid W)') + rsetbase = self.sexecute('Any W,X WITH W,X BEING (Any W,X ORDERBY W,X WHERE X wikiid W)') self.assertEquals(len(rsetbase), 4) self.assertEquals(sorted(rsetbase.rows), rsetbase.rows) - rset = self.execute('Any W,X LIMIT 2 OFFSET 2 WITH W,X BEING (Any W,X ORDERBY W,X WHERE X wikiid W)') + rset = self.sexecute('Any W,X LIMIT 2 OFFSET 2 WITH W,X BEING (Any W,X ORDERBY W,X WHERE X wikiid W)') self.assertEquals(rset.rows, rsetbase.rows[2:4]) - rset = self.execute('Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WITH W,X BEING (Any W,X WHERE X wikiid W)') + rset = self.sexecute('Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WITH W,X BEING (Any W,X WHERE X wikiid W)') self.assertEquals(rset.rows, rsetbase.rows[2:4]) - rset = self.execute('Any W,X WITH W,X BEING (Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WHERE X wikiid W)') + rset = self.sexecute('Any W,X WITH W,X BEING (Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WHERE X wikiid W)') self.assertEquals(rset.rows, rsetbase.rows[2:4]) def test_subquery2(self): - affeid = self.execute('Affaire X WHERE X ref 
"AFFREF"')[0][0] - rset = self.execute('Any X,AA,AB WITH X,AA,AB BEING (Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB)', + affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] + rset = self.sexecute('Any X,AA,AB WITH X,AA,AB BEING (Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB)', {'x': affeid}) self.assertEquals(len(rset), 1) self.assertEquals(rset[0][1], "pitetre") def test_not_relation(self): - states = set(tuple(x) for x in self.execute('Any S,SN WHERE S is State, S name SN')) + states = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN')) self.session.user.clear_all_caches() userstate = self.session.user.in_state[0] states.remove((userstate.eid, userstate.name)) - notstates = set(tuple(x) for x in self.execute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', + notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', {'x': self.session.user.eid}, 'x')) self.assertSetEquals(notstates, states) - aff1 = self.execute('Any X WHERE X is Affaire, X ref "AFFREF"')[0][0] - aff1stateeid, aff1statename = self.execute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1}, 'x')[0] + aff1 = self.sexecute('Any X WHERE X is Affaire, X ref "AFFREF"')[0][0] + aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1}, 'x')[0] self.assertEquals(aff1statename, 'pitetre') states.add((userstate.eid, userstate.name)) states.remove((aff1stateeid, aff1statename)) - notstates = set(tuple(x) for x in self.execute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', + notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', {'x': aff1}, 'x')) self.assertSetEquals(notstates, states) def test_absolute_url_base_url(self): + cu = cnx2.cursor() ceid = cu.execute('INSERT Card X: X title "without wikiid to get eid based url"')[0][0] cnx2.commit() - lc = self.execute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0) + lc = self.sexecute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0) self.assertEquals(lc.absolute_url(), 'http://extern.org/card/eid/%s' % ceid) + cu.execute('DELETE Card X WHERE X eid %(x)s', {'x':ceid}) + cnx2.commit() def test_absolute_url_no_base_url(self): cu = cnx3.cursor() ceid = cu.execute('INSERT Card X: X title "without wikiid to get eid based url"')[0][0] cnx3.commit() - lc = self.execute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0) + lc = self.sexecute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0) self.assertEquals(lc.absolute_url(), 'http://testing.fr/cubicweb/card/eid/%s' % lc.eid) + cu.execute('DELETE Card X WHERE X eid %(x)s', {'x':ceid}) + cnx3.commit() def test_nonregr1(self): ueid = self.session.user.eid - affaire = self.execute('Affaire X WHERE X ref "AFFREF"').get_entity(0, 0) - self.execute('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s', + affaire = self.sexecute('Affaire X WHERE X ref "AFFREF"').get_entity(0, 0) + self.sexecute('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s', {'x': affaire.eid, 'u': ueid}) def test_nonregr2(self): 
self.session.user.fire_transition('deactivate') treid = self.session.user.latest_trinfo().eid - rset = self.execute('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', + rset = self.sexecute('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', {'x': treid}) self.assertEquals(len(rset), 1) self.assertEquals(rset.rows[0], [self.session.user.eid]) def test_nonregr3(self): - self.execute('DELETE Card X WHERE X eid %(x)s, NOT X multisource_inlined_rel Y', {'x': self.ic1}) + self.sexecute('DELETE Card X WHERE X eid %(x)s, NOT X multisource_inlined_rel Y', {'x': self.ic1}) if __name__ == '__main__': from logilab.common.testlib import unittest_main diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_querier.py --- a/server/test/unittest_querier.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_querier.py Wed Jan 20 10:13:45 2010 +0100 @@ -46,7 +46,7 @@ ('C0 text,C1 integer', {'A': 'table0.C0', 'B': 'table0.C1'})) -repo, cnx = init_test_database('sqlite') +repo, cnx = init_test_database() @@ -907,7 +907,8 @@ self.execute("INSERT Personne Y: Y nom 'toto'") rset = self.execute('Personne X WHERE X nom "toto"') self.assertEqual(len(rset.rows), 1) - self.execute("DELETE Personne Y WHERE Y nom 'toto'") + drset = self.execute("DELETE Personne Y WHERE Y nom 'toto'") + self.assertEqual(drset.rows, rset.rows) rset = self.execute('Personne X WHERE X nom "toto"') self.assertEqual(len(rset.rows), 0) diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_repository.py --- a/server/test/unittest_repository.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_repository.py Wed Jan 20 10:13:45 2010 +0100 @@ -18,12 +18,13 @@ from yams.constraints import UniqueConstraint -from cubicweb import BadConnectionId, RepositoryError, ValidationError, UnknownEid, AuthenticationError +from cubicweb import (BadConnectionId, RepositoryError, ValidationError, + UnknownEid, AuthenticationError) from cubicweb.schema import CubicWebSchema, RQLConstraint from cubicweb.dbapi import connect, repo_connect, multiple_connections_unfix -from cubicweb.devtools.apptest import RepositoryBasedTC +from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools.repotest import tuplify -from cubicweb.server import repository +from cubicweb.server import repository, hook from cubicweb.server.sqlutils import SQL_PREFIX @@ -31,48 +32,35 @@ os.system('pyro-ns >/dev/null 2>/dev/null &') -class RepositoryTC(RepositoryBasedTC): +class RepositoryTC(CubicWebTC): """ singleton providing access to a persistent storage for entities and relation """ -# def setUp(self): -# pass - -# def tearDown(self): -# self.repo.config.db_perms = True -# cnxid = self.repo.connect(*self.default_user_password()) -# for etype in ('Affaire', 'Note', 'Societe', 'Personne'): -# self.repo.execute(cnxid, 'DELETE %s X' % etype) -# self.repo.commit(cnxid) -# self.repo.close(cnxid) - def test_fill_schema(self): self.repo.schema = CubicWebSchema(self.repo.config.appid) self.repo.config._cubes = None # avoid assertion error + self.repo.config.repairing = True # avoid versions checking self.repo.fill_schema() - pool = self.repo._get_pool() table = SQL_PREFIX + 'CWEType' namecol = SQL_PREFIX + 'name' finalcol = SQL_PREFIX + 'final' - try: - cu = self.session.system_sql('SELECT %s FROM %s WHERE %s is NULL' % ( - namecol, table, finalcol)) - self.assertEquals(cu.fetchall(), []) - cu = self.session.system_sql('SELECT %s FROM %s WHERE %s=%%(final)s ORDER BY %s' - % (namecol, table, finalcol, 
namecol), {'final': 'TRUE'}) - self.assertEquals(cu.fetchall(), [(u'Boolean',), (u'Bytes',), - (u'Date',), (u'Datetime',), - (u'Decimal',),(u'Float',), - (u'Int',), - (u'Interval',), (u'Password',), - (u'String',), (u'Time',)]) - finally: - self.repo._free_pool(pool) + self.session.set_pool() + cu = self.session.system_sql('SELECT %s FROM %s WHERE %s is NULL' % ( + namecol, table, finalcol)) + self.assertEquals(cu.fetchall(), []) + cu = self.session.system_sql('SELECT %s FROM %s WHERE %s=%%(final)s ORDER BY %s' + % (namecol, table, finalcol, namecol), {'final': 'TRUE'}) + self.assertEquals(cu.fetchall(), [(u'Boolean',), (u'Bytes',), + (u'Date',), (u'Datetime',), + (u'Decimal',),(u'Float',), + (u'Int',), + (u'Interval',), (u'Password',), + (u'String',), (u'Time',)]) def test_schema_has_owner(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) self.failIf(repo.execute(cnxid, 'CWEType X WHERE NOT X owned_by U')) self.failIf(repo.execute(cnxid, 'CWRType X WHERE NOT X owned_by U')) self.failIf(repo.execute(cnxid, 'CWAttribute X WHERE NOT X owned_by U')) @@ -81,18 +69,21 @@ self.failIf(repo.execute(cnxid, 'CWConstraintType X WHERE NOT X owned_by U')) def test_connect(self): - login, passwd = self.default_user_password() - self.assert_(self.repo.connect(login, passwd)) + self.assert_(self.repo.connect(self.admlogin, password=self.admpassword)) + self.assertRaises(AuthenticationError, + self.repo.connect, self.admlogin, password='nimportnawak') + self.assertRaises(AuthenticationError, + self.repo.connect, self.admlogin, password=None) self.assertRaises(AuthenticationError, - self.repo.connect, login, 'nimportnawak') + self.repo.connect, None, password=None) self.assertRaises(AuthenticationError, - self.repo.connect, login, None) + self.repo.connect, self.admlogin) self.assertRaises(AuthenticationError, - self.repo.connect, None, None) + self.repo.connect, None) def test_execute(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) repo.execute(cnxid, 'Any X') repo.execute(cnxid, 'Any X where X is Personne') repo.execute(cnxid, 'Any X where X is Personne, X nom ~= "to"') @@ -101,15 +92,15 @@ def test_login_upassword_accent(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) repo.execute(cnxid, 'INSERT CWUser X: X login %(login)s, X upassword %(passwd)s, X in_group G WHERE G name "users"', {'login': u"barnabé", 'passwd': u"héhéhé".encode('UTF8')}) repo.commit(cnxid) repo.close(cnxid) - self.assert_(repo.connect(u"barnabé", u"héhéhé".encode('UTF8'))) + self.assert_(repo.connect(u"barnabé", password=u"héhéhé".encode('UTF8'))) def test_invalid_entity_rollback(self): - cnxid = self.repo.connect(*self.default_user_password()) + cnxid = self.repo.connect(self.admlogin, password=self.admpassword) # no group self.repo.execute(cnxid, 'INSERT CWUser X: X login %(login)s, X upassword %(passwd)s', @@ -119,7 +110,7 @@ def test_close(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) self.assert_(cnxid) repo.close(cnxid) self.assertRaises(BadConnectionId, repo.execute, cnxid, 'Any X') @@ -130,9 +121,9 @@ def test_shared_data(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, 
password=self.admpassword) repo.set_shared_data(cnxid, 'data', 4) - cnxid2 = repo.connect(*self.default_user_password()) + cnxid2 = repo.connect(self.admlogin, password=self.admpassword) self.assertEquals(repo.get_shared_data(cnxid, 'data'), 4) self.assertEquals(repo.get_shared_data(cnxid2, 'data'), None) repo.set_shared_data(cnxid2, 'data', 5) @@ -150,14 +141,14 @@ def test_check_session(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) self.assertEquals(repo.check_session(cnxid), None) repo.close(cnxid) self.assertRaises(BadConnectionId, repo.check_session, cnxid) def test_transaction_base(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) # check db state result = repo.execute(cnxid, 'Personne X') self.assertEquals(result.rowcount, 0) @@ -176,7 +167,7 @@ def test_transaction_base2(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) # rollback relation insertion repo.execute(cnxid, "SET U in_group G WHERE U login 'admin', G name 'guests'") result = repo.execute(cnxid, "Any U WHERE U in_group G, U login 'admin', G name 'guests'") @@ -187,7 +178,7 @@ def test_transaction_base3(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) # rollback state change which trigger TrInfo insertion user = repo._get_session(cnxid).user user.fire_transition('deactivate') @@ -202,7 +193,7 @@ def test_close_wait_processing_request(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) repo.execute(cnxid, 'INSERT CWUser X: X login "toto", X upassword "tutu", X in_group G WHERE G name "users"') repo.commit(cnxid) # close has to be in the thread due to sqlite limitations @@ -223,8 +214,11 @@ self.assertListEquals([r.type for r in schema.eschema('CWAttribute').ordered_relations() if not r.type in ('eid', 'is', 'is_instance_of', 'identity', 'creation_date', 'modification_date', 'cwuri', - 'owned_by', 'created_by')], - ['relation_type', 'from_entity', 'to_entity', 'in_basket', 'constrained_by', + 'owned_by', 'created_by', + 'add_permission', 'delete_permission', 'read_permission')], + ['relation_type', + 'from_entity', 'to_entity', + 'in_basket', 'constrained_by', 'cardinality', 'ordernum', 'indexed', 'fulltextindexed', 'internationalizable', 'defaultval', 'description', 'description_format']) @@ -232,7 +226,7 @@ self.assertEquals(schema.eschema('CWEType').main_attribute(), 'name') self.assertEquals(schema.eschema('State').main_attribute(), 'name') - constraints = schema.rschema('name').rproperty('CWEType', 'String', 'constraints') + constraints = schema.rschema('name').rdef('CWEType', 'String').constraints self.assertEquals(len(constraints), 2) for cstr in constraints[:]: if isinstance(cstr, UniqueConstraint): @@ -244,7 +238,7 @@ self.assertEquals(sizeconstraint.min, None) self.assertEquals(sizeconstraint.max, 64) - constraints = schema.rschema('relation_type').rproperty('CWAttribute', 'CWRType', 'constraints') + constraints = schema.rschema('relation_type').rdef('CWAttribute', 'CWRType').constraints self.assertEquals(len(constraints), 1) cstr = constraints[0] self.assert_(isinstance(cstr, RQLConstraint)) @@ -271,7 +265,7 @@ repository.pyro_unregister(self.repo.config) 
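A minimal sketch, outside the patch proper, of the connection idiom the surrounding hunks migrate to: RepositoryBasedTC.default_user_password() disappears, CubicWebTC exposes admlogin/admpassword, and Repository.connect() takes the password as a keyword argument. It assumes a CubicWebTC-derived test class; the method name is illustrative only.

    from cubicweb import AuthenticationError

    def check_admin_connection(self):
        # new style: only the login is positional, credentials go by keyword
        cnxid = self.repo.connect(self.admlogin, password=self.admpassword)
        try:
            self.repo.execute(cnxid, 'Any X WHERE X is CWGroup')
        finally:
            self.repo.close(cnxid)
        # wrong or missing credentials must raise AuthenticationError
        self.assertRaises(AuthenticationError,
                          self.repo.connect, self.admlogin, password='wrong')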
def _pyro_client(self, done): - cnx = connect(self.repo.config.appid, u'admin', 'gingkow') + cnx = connect(self.repo.config.appid, u'admin', password='gingkow') try: # check we can get the schema schema = cnx.get_schema() @@ -286,7 +280,7 @@ def test_internal_api(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) session = repo._get_session(cnxid, setpool=True) self.assertEquals(repo.type_and_source_from_eid(1, session), ('CWGroup', 'system', None)) @@ -304,7 +298,7 @@ def test_session_api(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) self.assertEquals(repo.user_info(cnxid), (5, 'admin', set([u'managers']), {})) self.assertEquals(repo.describe(cnxid, 1), (u'CWGroup', u'system', None)) repo.close(cnxid) @@ -313,7 +307,7 @@ def test_shared_data_api(self): repo = self.repo - cnxid = repo.connect(*self.default_user_password()) + cnxid = repo.connect(self.admlogin, password=self.admpassword) self.assertEquals(repo.get_shared_data(cnxid, 'data'), None) repo.set_shared_data(cnxid, 'data', 4) self.assertEquals(repo.get_shared_data(cnxid, 'data'), 4) @@ -339,14 +333,14 @@ # print 'test time: %.3f (time) %.3f (cpu)' % ((time() - t), clock() - c) def test_delete_if_singlecard1(self): - note = self.add_entity('Affaire') - p1 = self.add_entity('Personne', nom=u'toto') + note = self.request().create_entity('Affaire') + p1 = self.request().create_entity('Personne', nom=u'toto') self.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s', {'x': note.eid, 'p': p1.eid}) rset = self.execute('Any P WHERE A todo_by P, A eid %(x)s', {'x': note.eid}) self.assertEquals(len(rset), 1) - p2 = self.add_entity('Personne', nom=u'tutu') + p2 = self.request().create_entity('Personne', nom=u'tutu') self.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s', {'x': note.eid, 'p': p2.eid}) rset = self.execute('Any P WHERE A todo_by P, A eid %(x)s', @@ -355,37 +349,34 @@ self.assertEquals(rset.rows[0][0], p2.eid) -class DataHelpersTC(RepositoryBasedTC): - - def setUp(self): - """ called before each test from this class """ - cnxid = self.repo.connect(*self.default_user_password()) - self.session = self.repo._sessions[cnxid] - self.session.set_pool() - - def tearDown(self): - self.session.rollback() +class DataHelpersTC(CubicWebTC): def test_create_eid(self): + self.session.set_pool() self.assert_(self.repo.system_source.create_eid(self.session)) def test_source_from_eid(self): + self.session.set_pool() self.assertEquals(self.repo.source_from_eid(1, self.session), self.repo.sources_by_uri['system']) def test_source_from_eid_raise(self): + self.session.set_pool() self.assertRaises(UnknownEid, self.repo.source_from_eid, -2, self.session) def test_type_from_eid(self): + self.session.set_pool() self.assertEquals(self.repo.type_from_eid(1, self.session), 'CWGroup') def test_type_from_eid_raise(self): + self.session.set_pool() self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, self.session) def test_add_delete_info(self): entity = self.repo.vreg['etypes'].etype_class('Personne')(self.session) entity.eid = -1 entity.complete = lambda x: None + self.session.set_pool() self.repo.add_info(self.session, entity, self.repo.sources_by_uri['system']) cu = self.session.system_sql('SELECT * FROM entities WHERE eid = -1') data = cu.fetchall() @@ -400,13 +391,14 @@ self.assertEquals(data, []) -class FTITC(RepositoryBasedTC): +class 
FTITC(CubicWebTC): def test_reindex_and_modified_since(self): eidp = self.execute('INSERT Personne X: X nom "toto", X prenom "tutu"')[0][0] self.commit() ts = datetime.now() self.assertEquals(len(self.execute('Personne X WHERE X has_text "tutu"')), 1) + self.session.set_pool() cu = self.session.system_sql('SELECT mtime, eid FROM entities WHERE eid = %s' % eidp) omtime = cu.fetchone()[0] # our sqlite datetime adapter is ignore seconds fraction, so we have to @@ -415,6 +407,7 @@ self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp}, 'x') self.commit() self.assertEquals(len(self.execute('Personne X WHERE X has_text "tutu"')), 1) + self.session.set_pool() cu = self.session.system_sql('SELECT mtime FROM entities WHERE eid = %s' % eidp) mtime = cu.fetchone()[0] self.failUnless(omtime < mtime) @@ -433,7 +426,7 @@ def test_composite_entity(self): assert self.schema.rschema('use_email').fulltext_container == 'subject' - eid = self.add_entity('EmailAddress', address=u'toto@logilab.fr').eid + eid = self.request().create_entity('EmailAddress', address=u'toto@logilab.fr').eid self.commit() rset = self.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'}) self.assertEquals(rset.rows, [[eid]]) @@ -445,14 +438,14 @@ self.commit() rset = self.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'}) self.assertEquals(rset.rows, []) - eid = self.add_entity('EmailAddress', address=u'tutu@logilab.fr').eid + eid = self.request().create_entity('EmailAddress', address=u'tutu@logilab.fr').eid self.execute('SET X use_email Y WHERE X login "admin", Y eid %(y)s', {'y': eid}) self.commit() rset = self.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'}) self.assertEquals(rset.rows, [[self.session.user.eid]]) -class DBInitTC(RepositoryBasedTC): +class DBInitTC(CubicWebTC): def test_versions_inserted(self): inserted = [r[0] for r in self.execute('Any K ORDERBY K WHERE P pkey K, P pkey ~= "system.version.%"')] @@ -462,70 +455,42 @@ u'system.version.file', u'system.version.folder', u'system.version.tag']) -class InlineRelHooksTC(RepositoryBasedTC): +CALLED = [] +class EcritParHook(hook.Hook): + __regid__ = 'inlinedrelhook' + __select__ = hook.Hook.__select__ & hook.match_rtype('ecrit_par') + events = ('before_add_relation', 'after_add_relation', + 'before_delete_relation', 'after_delete_relation') + def __call__(self): + CALLED.append((self.event, self.eidfrom, self.rtype, self.eidto)) + +class InlineRelHooksTC(CubicWebTC): """test relation hooks are called for inlined relations """ def setUp(self): - RepositoryBasedTC.setUp(self) + CubicWebTC.setUp(self) self.hm = self.repo.hm - self.called = [] - - def _before_relation_hook(self, pool, fromeid, rtype, toeid): - self.called.append((fromeid, rtype, toeid)) + CALLED[:] = () def _after_relation_hook(self, pool, fromeid, rtype, toeid): self.called.append((fromeid, rtype, toeid)) - def test_before_add_inline_relation(self): - """make sure before__relation hooks are called directly""" - self.hm.register_hook(self._before_relation_hook, - 'before_add_relation', 'ecrit_par') + def test_inline_relation(self): + """make sure _relation hooks are called for inlined relation""" + self.hm.register(EcritParHook) eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0] eidn = self.execute('INSERT Note X: X type "T"')[0][0] self.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"') - self.assertEquals(self.called, [(eidn, 'ecrit_par', eidp)]) - - def test_after_add_inline_relation(self): - """make sure after__relation hooks are deferred""" - 
self.hm.register_hook(self._after_relation_hook, - 'after_add_relation', 'ecrit_par') - eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0] - eidn = self.execute('INSERT Note X: X type "T"')[0][0] - self.assertEquals(self.called, []) - self.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"') - self.assertEquals(self.called, [(eidn, 'ecrit_par', eidp,)]) - - def test_after_add_inline(self): - """make sure after__relation hooks are deferred""" - p1 = self.add_entity('Personne', nom=u'toto') - self.hm.register_hook(self._after_relation_hook, - 'after_add_relation', 'ecrit_par') + self.assertEquals(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp), + ('after_add_relation', eidn, 'ecrit_par', eidp)]) + CALLED[:] = () + self.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"') + self.assertEquals(CALLED, [('before_delete_relation', eidn, 'ecrit_par', eidp), + ('after_delete_relation', eidn, 'ecrit_par', eidp)]) + CALLED[:] = () eidn = self.execute('INSERT Note N: N ecrit_par P WHERE P nom "toto"')[0][0] - self.assertEquals(self.called, [(eidn, 'ecrit_par', p1.eid,)]) - - def test_before_delete_inline_relation(self): - """make sure before__relation hooks are called directly""" - self.hm.register_hook(self._before_relation_hook, - 'before_delete_relation', 'ecrit_par') - eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0] - eidn = self.execute('INSERT Note X: X type "T"')[0][0] - self.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"') - self.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"') - self.assertEquals(self.called, [(eidn, 'ecrit_par', eidp)]) - rset = self.execute('Any Y where N ecrit_par Y, N type "T", Y nom "toto"') - # make sure the relation is really deleted - self.failUnless(len(rset) == 0, "failed to delete inline relation") - - def test_after_delete_inline_relation(self): - """make sure after__relation hooks are deferred""" - self.hm.register_hook(self._after_relation_hook, - 'after_delete_relation', 'ecrit_par') - eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0] - eidn = self.execute('INSERT Note X: X type "T"')[0][0] - self.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"') - self.assertEquals(self.called, []) - self.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"') - self.assertEquals(self.called, [(eidn, 'ecrit_par', eidp,)]) + self.assertEquals(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp), + ('after_add_relation', eidn, 'ecrit_par', eidp)]) if __name__ == '__main__': diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_rql2sql.py --- a/server/test/unittest_rql2sql.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_rql2sql.py Wed Jan 20 10:13:45 2010 +0100 @@ -34,6 +34,7 @@ config.bootstrap_cubes() schema = config.load_schema() schema['in_state'].inlined = True +schema['state_of'].inlined = False schema['comments'].inlined = False PARSER = [ @@ -358,10 +359,6 @@ ('DISTINCT Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), EXISTS(X read_permission Y)', '''SELECT DISTINCT _X.cw_eid, rel_read_permission0.eid_to FROM cw_CWEType AS _X, read_permission_relation AS rel_read_permission0 -WHERE _X.cw_name=CWGroup AND rel_read_permission0.eid_to IN(1, 2, 3) AND EXISTS(SELECT 1 WHERE rel_read_permission0.eid_from=_X.cw_eid) -UNION -SELECT DISTINCT _X.cw_eid, rel_read_permission0.eid_to -FROM cw_CWRType AS _X, read_permission_relation AS rel_read_permission0 WHERE _X.cw_name=CWGroup AND rel_read_permission0.eid_to IN(1, 2, 3) AND EXISTS(SELECT 1 WHERE 
rel_read_permission0.eid_from=_X.cw_eid)'''), # no distinct, Y can't be invariant @@ -372,14 +369,6 @@ UNION ALL SELECT _X.cw_eid, _Y.cw_eid FROM cw_CWEType AS _X, cw_RQLExpression AS _Y -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid) -UNION ALL -SELECT _X.cw_eid, _Y.cw_eid -FROM cw_CWGroup AS _Y, cw_CWRType AS _X -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid) -UNION ALL -SELECT _X.cw_eid, _Y.cw_eid -FROM cw_CWRType AS _X, cw_RQLExpression AS _Y WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)'''), # DISTINCT but NEGED exists, can't be invariant @@ -390,14 +379,6 @@ UNION SELECT DISTINCT _X.cw_eid, _Y.cw_eid FROM cw_CWEType AS _X, cw_RQLExpression AS _Y -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid) -UNION -SELECT DISTINCT _X.cw_eid, _Y.cw_eid -FROM cw_CWGroup AS _Y, cw_CWRType AS _X -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid) -UNION -SELECT DISTINCT _X.cw_eid, _Y.cw_eid -FROM cw_CWRType AS _X, cw_RQLExpression AS _Y WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)'''), # should generate the same query as above @@ -408,14 +389,6 @@ UNION SELECT DISTINCT _X.cw_eid, _Y.cw_eid FROM cw_CWEType AS _X, cw_RQLExpression AS _Y -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid) -UNION -SELECT DISTINCT _X.cw_eid, _Y.cw_eid -FROM cw_CWGroup AS _Y, cw_CWRType AS _X -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid) -UNION -SELECT DISTINCT _X.cw_eid, _Y.cw_eid -FROM cw_CWRType AS _X, cw_RQLExpression AS _Y WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)'''), # neged relation, can't be inveriant @@ -426,14 +399,6 @@ UNION ALL SELECT _X.cw_eid, _Y.cw_eid FROM cw_CWEType AS _X, cw_RQLExpression AS _Y -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid) -UNION ALL -SELECT _X.cw_eid, _Y.cw_eid -FROM cw_CWGroup AS _Y, cw_CWRType AS _X -WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS 
rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid) -UNION ALL -SELECT _X.cw_eid, _Y.cw_eid -FROM cw_CWRType AS _X, cw_RQLExpression AS _Y WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)'''), ('Any MAX(X)+MIN(X), N GROUPBY N WHERE X name N, X is IN (Basket, Folder, Tag);', diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_rqlannotation.py --- a/server/test/unittest_rqlannotation.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_rqlannotation.py Wed Jan 20 10:13:45 2010 +0100 @@ -6,7 +6,7 @@ from cubicweb.devtools import init_test_database from cubicweb.devtools.repotest import BaseQuerierTC -repo, cnx = init_test_database('sqlite') +repo, cnx = init_test_database() class SQLGenAnnotatorTC(BaseQuerierTC): repo = repo @@ -268,11 +268,11 @@ def test_or_exists_1(self): # query generated by security rewriting rqlst = self._prepare('DISTINCT Any A,S WHERE A is Affaire, S nom "chouette", S is IN(Division, Societe, SubDivision),' - '(EXISTS(A owned_by %(D)s)) ' - 'OR ((((EXISTS(E concerne C?, C owned_by %(D)s, A identity E, C is Note, E is Affaire)) ' - 'OR (EXISTS(I concerne H?, H owned_by %(D)s, H is Societe, A identity I, I is Affaire))) ' - 'OR (EXISTS(J concerne G?, G owned_by %(D)s, G is SubDivision, A identity J, J is Affaire))) ' - 'OR (EXISTS(K concerne F?, F owned_by %(D)s, F is Division, A identity K, K is Affaire)))') + '(EXISTS(A owned_by D)) ' + 'OR ((((EXISTS(E concerne C?, C owned_by D, A identity E, C is Note, E is Affaire)) ' + 'OR (EXISTS(I concerne H?, H owned_by D, H is Societe, A identity I, I is Affaire))) ' + 'OR (EXISTS(J concerne G?, G owned_by D, G is SubDivision, A identity J, J is Affaire))) ' + 'OR (EXISTS(K concerne F?, F owned_by D, F is Division, A identity K, K is Affaire)))') self.assertEquals(rqlst.defined_vars['A']._q_invariant, False) self.assertEquals(rqlst.defined_vars['S']._q_invariant, False) @@ -285,11 +285,11 @@ def test_or_exists_3(self): rqlst = self._prepare('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' 'WHERE C is Societe, S concerne C, C nom CS, ' - '(EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))') + '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by N, N title "published"))') self.assertEquals(rqlst.defined_vars['S']._q_invariant, True) rqlst = self._prepare('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' 'WHERE S is Affaire, C is Societe, S concerne C, C nom CS, ' - '(EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))') + '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by N, N title "published"))') self.assertEquals(rqlst.defined_vars['S']._q_invariant, True) def test_nonregr_ambiguity(self): diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_schemaserial.py --- a/server/test/unittest_schemaserial.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_schemaserial.py Wed Jan 20 10:13:45 2010 +0100 @@ -16,6 +16,7 @@ schema = loader.load(config) from cubicweb.server.schemaserial import * +from cubicweb.server.schemaserial import _erperms2rql as erperms2rql class Schema2RQLTC(TestCase): @@ -53,15 +54,15 @@ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name 
%(oe)s', {'rt': 'relation_type', 'description': u'', 'composite': u'object', 'oe': 'CWRType', + 'ordernum': 1, 'cardinality': u'1*', 'se': 'CWAttribute'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWRelation', + {'rt': 'relation_type', 'oe': 'CWRType', 'ctname': u'RQLConstraint', 'se': 'CWAttribute', 'value': u';O;O final TRUE\n'}), + + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s', + {'rt': 'relation_type', 'description': u'', 'composite': u'object', 'oe': 'CWRType', 'ordernum': 1, 'cardinality': u'1*', 'se': 'CWRelation'}), ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWRelation', {'rt': 'relation_type', 'oe': 'CWRType', 'ctname': u'RQLConstraint', 'se': 'CWRelation', 'value': u';O;O final FALSE\n'}), - - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s', - {'rt': 'relation_type', 'description': u'', 'composite': u'object', 'oe': 'CWRType', - 'ordernum': 1, 'cardinality': u'1*', 'se': 'CWAttribute'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWRelation', - {'rt': 'relation_type', 'oe': 'CWRType', 'ctname': u'RQLConstraint', 'se': 'CWAttribute', 'value': u';O;O final TRUE\n'}), ]) def test_rschema2rql2(self): @@ -70,14 +71,19 @@ ('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symetric %(symetric)s', {'description': u'core relation giving to a group the permission to add an entity or relation type', 'symetric': False, 'name': u'add_permission', 'final': False, 'fulltext_container': None, 'inlined': False}), ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s', + {'rt': 'add_permission', 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'oe': 'CWGroup', 'ordernum': 3, 'cardinality': u'**', 'se': 'CWAttribute'}), + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s', + {'rt': 'add_permission', 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'oe': 'RQLExpression', 'ordernum': 5, 'cardinality': u'*?', 'se': 'CWAttribute'}), + + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE 
name %(se)s,ER name %(rt)s,OE name %(oe)s', {'rt': 'add_permission', 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'oe': 'CWGroup', 'ordernum': 3, 'cardinality': u'**', 'se': 'CWEType'}), ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s', - {'rt': 'add_permission', 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'oe': 'CWGroup', 'ordernum': 3, 'cardinality': u'**', 'se': 'CWRType'}), + {'rt': 'add_permission', 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'oe': 'RQLExpression', 'ordernum': 5, 'cardinality': u'*?', 'se': 'CWEType'}), ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s', - {'rt': 'add_permission', 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'oe': 'RQLExpression', 'ordernum': 5, 'cardinality': u'*?', 'se': 'CWEType'}), + {'rt': 'add_permission', 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'oe': 'CWGroup', 'ordernum': 3, 'cardinality': u'**', 'se': 'CWRelation'}), ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s', - {'rt': 'add_permission', 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'oe': 'RQLExpression', 'ordernum': 5, 'cardinality': u'*?', 'se': 'CWRType'}), + {'rt': 'add_permission', 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'oe': 'RQLExpression', 'ordernum': 5, 'cardinality': u'*?', 'se': 'CWRelation'}), ]) def test_rschema2rql3(self): @@ -87,18 +93,18 @@ {'description': u'', 'symetric': False, 'name': u'cardinality', 'final': True, 'fulltext_container': None, 'inlined': False}), ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s', + {'rt': 'cardinality', 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1', 'oe': 'String', 'se': 'CWAttribute'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWAttribute', + {'rt': 'cardinality', 'oe': 'String', 'ctname': u'SizeConstraint', 'se': 'CWAttribute', 'value': u'max=2'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWAttribute', + {'rt': 
'cardinality', 'oe': 'String', 'ctname': u'StaticVocabularyConstraint', 'se': 'CWAttribute', 'value': u"u'?1', u'11'"}), + + ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s', {'rt': 'cardinality', 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1', 'oe': 'String', 'se': 'CWRelation'}), ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWAttribute', {'rt': 'cardinality', 'oe': 'String', 'ctname': u'SizeConstraint', 'se': 'CWRelation', 'value': u'max=2'}), ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWAttribute', {'rt': 'cardinality', 'oe': 'String', 'ctname': u'StaticVocabularyConstraint', 'se': 'CWRelation', 'value': u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'"}), - - ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s', - {'rt': 'cardinality', 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1', 'oe': 'String', 'se': 'CWAttribute'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWAttribute', - {'rt': 'cardinality', 'oe': 'String', 'ctname': u'SizeConstraint', 'se': 'CWAttribute', 'value': u'max=2'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWAttribute', - {'rt': 'cardinality', 'oe': 'String', 'ctname': u'StaticVocabularyConstraint', 'se': 'CWAttribute', 'value': u"u'?1', u'11'"}), ]) @@ -142,36 +148,34 @@ } def test_eperms2rql1(self): - self.assertListEquals([rql for rql, kwargs in erperms2rql(schema.eschema('CWEType'), self.GROUP_MAPPING)], - ['SET X read_permission Y WHERE X is CWEType, X name %(name)s, Y eid %(g)s', - 'SET X read_permission Y WHERE X is CWEType, X name %(name)s, Y eid %(g)s', - 'SET X read_permission Y WHERE X is CWEType, X name %(name)s, Y eid %(g)s', - 'SET X add_permission Y WHERE X is CWEType, X name %(name)s, Y eid %(g)s', - 'SET X update_permission Y WHERE X is CWEType, X name %(name)s, Y eid %(g)s', - 'SET X update_permission Y WHERE X is CWEType, X name %(name)s, Y eid %(g)s', - 'SET X delete_permission Y WHERE X is CWEType, X name %(name)s, Y eid %(g)s', + 
self.assertListEquals([(rql, kwargs) for rql, kwargs in erperms2rql(schema.eschema('CWEType'), self.GROUP_MAPPING)], + [('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 0}), + ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 1}), + ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 2}), + ('SET X add_permission Y WHERE Y eid %(g)s, ', {'g': 0}), + ('SET X update_permission Y WHERE Y eid %(g)s, ', {'g': 0}), + ('SET X update_permission Y WHERE Y eid %(g)s, ', {'g': 3}), + ('SET X delete_permission Y WHERE Y eid %(g)s, ', {'g': 0}), ]) def test_rperms2rql2(self): - self.assertListEquals([rql for rql, kwargs in erperms2rql(schema.rschema('read_permission'), self.GROUP_MAPPING)], - ['SET X read_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X read_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X read_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X add_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X delete_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', + self.assertListEquals([(rql, kwargs) for rql, kwargs in erperms2rql(schema.rschema('read_permission').rdef('CWEType', 'CWGroup'), self.GROUP_MAPPING)], + [('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 0}), + ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 1}), + ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 2}), + ('SET X add_permission Y WHERE Y eid %(g)s, ', {'g': 0}), + ('SET X delete_permission Y WHERE Y eid %(g)s, ', {'g': 0}), ]) def test_rperms2rql3(self): - self.assertListEquals([rql for rql, kwargs in erperms2rql(schema.rschema('name'), self.GROUP_MAPPING)], - ['SET X read_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X read_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X read_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X add_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X add_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X add_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X delete_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X delete_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', - 'SET X delete_permission Y WHERE X is CWRType, X name %(name)s, Y eid %(g)s', + self.assertListEquals([(rql, kwargs) for rql, kwargs in erperms2rql(schema.rschema('name').rdef('CWEType', 'String'), self.GROUP_MAPPING)], + [('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 0}), + ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 1}), + ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 2}), + ('SET X add_permission Y WHERE Y eid %(g)s, ', {'g': 0}), + ('SET X add_permission Y WHERE Y eid %(g)s, ', {'g': 1}), + ('SET X delete_permission Y WHERE Y eid %(g)s, ', {'g': 0}), + ('SET X delete_permission Y WHERE Y eid %(g)s, ', {'g': 1}), ]) #def test_perms2rql(self): diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_security.py --- a/server/test/unittest_security.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_security.py Wed Jan 20 10:13:45 2010 +0100 @@ -4,23 +4,23 @@ import sys from logilab.common.testlib import unittest_main, TestCase -from cubicweb.devtools.apptest import RepositoryBasedTC +from cubicweb.devtools.testlib import CubicWebTC from cubicweb import Unauthorized, ValidationError from cubicweb.server.querier import check_read_access -class BaseSecurityTC(RepositoryBasedTC): +class 
BaseSecurityTC(CubicWebTC): def setUp(self): - RepositoryBasedTC.setUp(self) + CubicWebTC.setUp(self) self.create_user('iaminusersgrouponly') - self.readoriggroups = self.schema['Personne'].get_groups('read') - self.addoriggroups = self.schema['Personne'].get_groups('add') + self.readoriggroups = self.schema['Personne'].permissions['read'] + self.addoriggroups = self.schema['Personne'].permissions['add'] def tearDown(self): - RepositoryBasedTC.tearDown(self) - self.schema['Personne'].set_groups('read', self.readoriggroups) - self.schema['Personne'].set_groups('add', self.addoriggroups) + CubicWebTC.tearDown(self) + self.schema['Personne'].set_action_permissions('read', self.readoriggroups) + self.schema['Personne'].set_action_permissions('add', self.addoriggroups) class LowLevelSecurityFunctionTC(BaseSecurityTC): @@ -29,7 +29,7 @@ rql = u'Personne U where U nom "managers"' rqlst = self.repo.vreg.rqlhelper.parse(rql).children[0] origgroups = self.schema['Personne'].get_groups('read') - self.schema['Personne'].set_groups('read', ('users', 'managers')) + self.schema['Personne'].set_action_permissions('read', ('users', 'managers')) self.repo.vreg.rqlhelper.compute_solutions(rqlst) solution = rqlst.solutions[0] check_read_access(self.schema, self.session.user, rqlst, solution) @@ -37,7 +37,7 @@ cu = cnx.cursor() self.assertRaises(Unauthorized, check_read_access, - self.schema, cnx.user(self.current_session()), rqlst, solution) + self.schema, cnx.user(self.session), rqlst, solution) self.assertRaises(Unauthorized, cu.execute, rql) def test_upassword_not_selectable(self): @@ -96,8 +96,8 @@ def test_update_security_2(self): cnx = self.login('anon') cu = cnx.cursor() - self.repo.schema['Personne'].set_groups('read', ('users', 'managers')) - self.repo.schema['Personne'].set_groups('add', ('guests', 'users', 'managers')) + self.repo.schema['Personne'].set_action_permissions('read', ('users', 'managers')) + self.repo.schema['Personne'].set_action_permissions('add', ('guests', 'users', 'managers')) self.assertRaises(Unauthorized, cu.execute, "SET X nom 'bidulechouette' WHERE X is Personne") #self.assertRaises(Unauthorized, cnx.commit) # test nothing has actually been inserted @@ -165,7 +165,7 @@ def test_insert_relation_rql_permission(self): cnx = self.login('iaminusersgrouponly') - session = self.current_session() + session = self.session cu = cnx.cursor(session) cu.execute("SET A concerne S WHERE A is Affaire, S is Societe") # should raise Unauthorized since user don't own S @@ -177,7 +177,7 @@ ent = rset.get_entity(0, 0) session.set_pool() # necessary self.assertRaises(Unauthorized, - ent.e_schema.check_perm, session, 'update', ent.eid) + ent.e_schema.check_perm, session, 'update', eid=ent.eid) self.assertRaises(Unauthorized, cu.execute, "SET P travaille S WHERE P is Personne, S is Societe") # test nothing has actually been inserted: @@ -210,17 +210,17 @@ def test_user_can_change_its_upassword(self): - ueid = self.create_user('user') + ueid = self.create_user('user').eid cnx = self.login('user') cu = cnx.cursor() cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', {'x': ueid, 'passwd': 'newpwd'}, 'x') cnx.commit() cnx.close() - cnx = self.login('user', 'newpwd') + cnx = self.login('user', password='newpwd') def test_user_cant_change_other_upassword(self): - ueid = self.create_user('otheruser') + ueid = self.create_user('otheruser').eid cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', @@ -230,7 +230,7 @@ # read security test 
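A compact sketch, outside the patch proper, of the permission API migration applied throughout this module: eschema.get_groups()/set_groups() give way to the permissions dict and set_action_permissions(). Personne is the entity type these tests use; the helper name is illustrative only.

    def restrict_personne_read(schema):
        eschema = schema['Personne']
        # old API: eschema.get_groups('read') / eschema.set_groups('read', ...)
        orig = eschema.permissions['read']
        eschema.set_action_permissions('read', ('managers',))
        return orig  # keep the original tuple so tearDown can restore it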
def test_read_base(self): - self.schema['Personne'].set_groups('read', ('users', 'managers')) + self.schema['Personne'].set_action_permissions('read', ('users', 'managers')) cnx = self.login('anon') cu = cnx.cursor() self.assertRaises(Unauthorized, @@ -281,7 +281,7 @@ self.execute("INSERT Personne X: X nom 'bidule'") self.execute("INSERT Societe X: X nom 'bidule'") self.commit() - self.schema['Personne'].set_groups('read', ('managers',)) + self.schema['Personne'].set_action_permissions('read', ('managers',)) cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() rset = cu.execute('Any N WHERE N has_text "bidule"') @@ -293,7 +293,7 @@ self.execute("INSERT Personne X: X nom 'bidule'") self.execute("INSERT Societe X: X nom 'bidule'") self.commit() - self.schema['Personne'].set_groups('read', ('managers',)) + self.schema['Personne'].set_action_permissions('read', ('managers',)) cnx = self.login('anon') cu = cnx.cursor() rset = cu.execute('Any N,U WHERE N has_text "bidule", N owned_by U?') @@ -371,8 +371,8 @@ def test_attribute_read_security(self): # anon not allowed to see users'login, but they can see users - self.repo.schema['CWUser'].set_groups('read', ('guests', 'users', 'managers')) - self.repo.schema['login'].set_groups('read', ('users', 'managers')) + self.repo.schema['CWUser'].set_action_permissions('read', ('guests', 'users', 'managers')) + self.repo.schema['CWUser'].rdef('login').set_action_permissions('read', ('users', 'managers')) cnx = self.login('anon') cu = cnx.cursor() rset = cu.execute('CWUser X') @@ -415,7 +415,7 @@ def test_users_and_groups_non_readable_by_guests(self): cnx = self.login('anon') - anon = cnx.user(self.current_session()) + anon = cnx.user(self.session) cu = cnx.cursor() # anonymous user can only read itself rset = cu.execute('Any L WHERE X owned_by U, U login L') @@ -425,7 +425,7 @@ # anonymous user can read groups (necessary to check allowed transitions for instance) self.assert_(cu.execute('CWGroup X')) # should only be able to read the anonymous user, not another one - origuser = self.session.user + origuser = self.adminsession.user self.assertRaises(Unauthorized, cu.execute, 'CWUser X WHERE X eid %(x)s', {'x': origuser.eid}, 'x') # nothing selected, nothing updated, no exception raised @@ -461,7 +461,7 @@ self.commit() cnx = self.login('anon') cu = cnx.cursor() - anoneid = self.current_session().user.eid + anoneid = self.session.user.eid self.assertEquals(cu.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' 'B bookmarked_by U, U eid %s' % anoneid).rows, [['index', '?vid=index']]) @@ -490,27 +490,27 @@ eid = self.execute('INSERT Affaire X: X ref "ARCT01"')[0][0] self.commit() cnx = self.login('iaminusersgrouponly') - session = self.current_session() + session = self.session # needed to avoid check_perm error session.set_pool() # needed to remove rql expr granting update perm to the user - self.schema['Affaire'].set_rqlexprs('update', ()) + self.schema['Affaire'].set_action_permissions('update', self.schema['Affaire'].get_groups('update')) self.assertRaises(Unauthorized, - self.schema['Affaire'].check_perm, session, 'update', eid) + self.schema['Affaire'].check_perm, session, 'update', eid=eid) cu = cnx.cursor() - self.schema['Affaire'].set_groups('read', ('users',)) + self.schema['Affaire'].set_action_permissions('read', ('users',)) try: aff = cu.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) aff.fire_transition('abort') cnx.commit() # though changing a user state (even logged user) is reserved to managers - user = 
cnx.user(self.current_session()) + user = cnx.user(self.session) # XXX wether it should raise Unauthorized or ValidationError is not clear # the best would probably ValidationError if the transition doesn't exist # from the current state but Unauthorized if it exists but user can't pass it self.assertRaises(ValidationError, user.fire_transition, 'deactivate') finally: - self.schema['Affaire'].set_groups('read', ('managers',)) + self.schema['Affaire'].set_action_permissions('read', ('managers',)) def test_trinfo_security(self): aff = self.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0) diff -r 15d541321a8c -r 74c1597f8a82 server/test/unittest_ssplanner.py --- a/server/test/unittest_ssplanner.py Wed Jan 20 10:13:02 2010 +0100 +++ b/server/test/unittest_ssplanner.py Wed Jan 20 10:13:45 2010 +0100 @@ -10,7 +10,7 @@ from cubicweb.server.ssplanner import SSPlanner # keep cnx so it's not garbage collected and the associated session closed -repo, cnx = init_test_database('sqlite') +repo, cnx = init_test_database() class SSPlannerTC(BasePlannerTC): repo = repo diff -r 15d541321a8c -r 74c1597f8a82 sobjects/email.py --- a/sobjects/email.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,71 +0,0 @@ -"""hooks to ensure use_email / primary_email relations consistency - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -from cubicweb.server.hooksmanager import Hook -from cubicweb.server.pool import PreCommitOperation -from cubicweb.server.repository import ensure_card_respected - -class SetUseEmailRelationOp(PreCommitOperation): - """delay this operation to commit to avoid conflict with a late rql query - already setting the relation - """ - rtype = 'use_email' - fromeid = toeid = None # make pylint happy - - def condition(self): - """check entity has use_email set for the email address""" - return not self.session.unsafe_execute( - 'Any X WHERE X eid %(x)s, X use_email Y, Y eid %(y)s', - {'x': self.fromeid, 'y': self.toeid}, 'x') - - def precommit_event(self): - session = self.session - if self.condition(): - # we've to handle cardinaly by ourselves since we're using unsafe_execute - # but use session.execute and not session.unsafe_execute to check we - # can change the relation - ensure_card_respected(session.execute, session, - self.fromeid, self.rtype, self.toeid) - session.unsafe_execute( - 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % self.rtype, - {'x': self.fromeid, 'y': self.toeid}, 'x') - -class SetPrimaryEmailRelationOp(SetUseEmailRelationOp): - rtype = 'primary_email' - - def condition(self): - """check entity has no primary_email set""" - return not self.session.unsafe_execute( - 'Any X WHERE X eid %(x)s, X primary_email Y', - {'x': self.fromeid}, 'x') - - -class SetPrimaryEmailHook(Hook): - """notify when a bug or story or version has its state modified""" - events = ('after_add_relation',) - accepts = ('use_email',) - - def call(self, session, fromeid, rtype, toeid): - subjtype = session.describe(fromeid)[0] - eschema = self.vreg.schema[subjtype] - if 'primary_email' in eschema.subject_relations(): - SetPrimaryEmailRelationOp(session, vreg=self.vreg, - fromeid=fromeid, toeid=toeid) - -class SetUseEmailHook(Hook): - """notify when a bug or story or version has its state modified""" - events = 
('after_add_relation',) - accepts = ('primary_email',) - - def call(self, session, fromeid, rtype, toeid): - subjtype = session.describe(fromeid)[0] - eschema = self.vreg.schema[subjtype] - if 'use_email' in eschema.subject_relations(): - SetUseEmailRelationOp(session, vreg=self.vreg, - fromeid=fromeid, toeid=toeid) diff -r 15d541321a8c -r 74c1597f8a82 sobjects/hooks.py --- a/sobjects/hooks.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,87 +0,0 @@ -"""various library content hooks - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" - -from datetime import datetime - -from cubicweb import RepositoryError -from cubicweb.common.uilib import soup2xhtml -from cubicweb.server.hooksmanager import Hook -from cubicweb.server.pool import PreCommitOperation - - -class SetModificationDateOnStateChange(Hook): - """update entity's modification date after changing its state""" - events = ('after_add_relation',) - accepts = ('in_state',) - - def call(self, session, fromeid, rtype, toeid): - if fromeid in session.transaction_data.get('neweids', ()): - # new entity, not needed - return - entity = session.entity_from_eid(fromeid) - try: - entity.set_attributes(modification_date=datetime.now(), - _cw_unsafe=True) - except RepositoryError, ex: - # usually occurs if entity is coming from a read-only source - # (eg ldap user) - self.warning('cant change modification date for %s: %s', entity, ex) - - -class AddUpdateCWUserHook(Hook): - """ensure user logins are stripped""" - events = ('before_add_entity', 'before_update_entity',) - accepts = ('CWUser',) - - def call(self, session, entity): - if 'login' in entity and entity['login']: - entity['login'] = entity['login'].strip() - - -class AutoDeleteBookmark(PreCommitOperation): - beid = None # make pylint happy - def precommit_event(self): - session = self.session - if not self.beid in session.transaction_data.get('pendingeids', ()): - if not session.unsafe_execute('Any X WHERE X bookmarked_by U, X eid %(x)s', - {'x': self.beid}, 'x'): - session.unsafe_execute('DELETE Bookmark X WHERE X eid %(x)s', - {'x': self.beid}, 'x') - -class DelBookmarkedByHook(Hook): - """ensure user logins are stripped""" - events = ('after_delete_relation',) - accepts = ('bookmarked_by',) - - def call(self, session, subj, rtype, obj): - AutoDeleteBookmark(session, beid=subj) - - -class TidyHtmlFields(Hook): - """tidy HTML in rich text strings""" - events = ('before_add_entity', 'before_update_entity') - accepts = ('Any',) - - def call(self, session, entity): - if session.is_super_session: - return - metaattrs = entity.e_schema.meta_attributes() - for metaattr, (metadata, attr) in metaattrs.iteritems(): - if metadata == 'format': - try: - value = entity[attr] - except KeyError: - continue # no text to tidy - if isinstance(value, unicode): # filter out None and Binary - if self.event == 'before_add_entity': - fmt = entity.get(metaattr) - else: - fmt = entity.get_value(metaattr) - if fmt == 'text/html': - entity[attr] = soup2xhtml(value, session.encoding) diff -r 15d541321a8c -r 74c1597f8a82 sobjects/notification.py --- a/sobjects/notification.py Wed Jan 20 10:13:02 2010 +0100 +++ b/sobjects/notification.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,4 +1,4 @@ -"""some hooks and views to handle notification on entity's changes 
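sobjects/email.py and sobjects/hooks.py are deleted above; their logic apparently moves to the new cubicweb.hooks package (the notification module below imports cubicweb.hooks.notification, and a 'hooks' entry is added to vregistry_path in unittest_cwconfig later in this patch). As a rough sketch only, with an invented class name and assuming the 3.6 hook API (hook.Hook, __select__, events, __call__), the removed AddUpdateCWUserHook would look something like:

    from cubicweb.server import hook
    from cubicweb.selectors import implements

    class StripCWUserLoginHook(hook.Hook):   # hypothetical name/regid, for illustration
        __regid__ = 'strip_cwuser_login'
        __select__ = hook.Hook.__select__ & implements('CWUser')
        events = ('before_add_entity', 'before_update_entity')

        def __call__(self):
            # same behaviour as the removed AddUpdateCWUserHook.call(): strip the login
            if 'login' in self.entity and self.entity['login']:
                self.entity['login'] = self.entity['login'].strip()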
+"""some views to handle notification on data changes :organization: Logilab :copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. @@ -13,15 +13,10 @@ from logilab.common.textutils import normalize_text from logilab.common.deprecation import class_renamed, deprecated -from cubicweb import RegistryException -from cubicweb.selectors import implements, yes +from cubicweb.selectors import yes from cubicweb.view import Component -from cubicweb.common.mail import NotificationView, parse_message_id, SkipEmail -from cubicweb.server.pool import PreCommitOperation, SingleLastOperation -from cubicweb.server.hookhelper import SendMailOp -from cubicweb.server.hooksmanager import Hook - -parse_message_id = deprecated('parse_message_id is now defined in cubicweb.common.mail')(parse_message_id) +from cubicweb.mail import NotificationView, SkipEmail +from cubicweb.server.hook import SendMailOp class RecipientsFinder(Component): @@ -30,155 +25,40 @@ by default user's with their email set are notified if any, else the default email addresses specified in the configuration are used """ - id = 'recipients_finder' + __regid__ = 'recipients_finder' __select__ = yes() user_rql = ('Any X,E,A WHERE X is CWUser, X in_state S, S name "activated",' 'X primary_email E, E address A') def recipients(self): - mode = self.config['default-recipients-mode'] + mode = self._cw.vreg.config['default-recipients-mode'] if mode == 'users': # use unsafe execute else we may don't have the right to see users # to notify... - execute = self.req.unsafe_execute + execute = self._cw.unsafe_execute dests = [(u.get_email(), u.property_value('ui.language')) for u in execute(self.user_rql, build_descr=True, propagate=True).entities()] elif mode == 'default-dest-addrs': - lang = self.vreg.property_value('ui.language') - dests = zip(self.config['default-dest-addrs'], repeat(lang)) + lang = self._cw.vreg.property_value('ui.language') + dests = zip(self._cw.vreg.config['default-dest-addrs'], repeat(lang)) else: # mode == 'none' dests = [] return dests -# hooks ####################################################################### - -class EntityUpdatedNotificationOp(SingleLastOperation): - - def precommit_event(self): - session = self.session - for eid in session.transaction_data['changes']: - view = session.vreg['views'].select('notif_entity_updated', session, - rset=session.eid_rset(eid), - row=0) - RenderAndSendNotificationView(session, view=view) - - def commit_event(self): - pass - - -class RenderAndSendNotificationView(PreCommitOperation): - """delay rendering of notification view until precommit""" - def precommit_event(self): - view = self.view - if view.rset is not None and not view.rset: - return # entity added and deleted in the same transaction (cache effect) - if view.rset and view.rset[0][0] in self.session.transaction_data.get('pendingeids', ()): - return # entity added and deleted in the same transaction - self.view.render_and_send(**getattr(self, 'viewargs', {})) - - -class StatusChangeHook(Hook): - """notify when a workflowable entity has its state modified""" - events = ('after_add_entity',) - accepts = ('TrInfo',) - - def call(self, session, entity): - if not entity.from_state: # not a transition - return - rset = entity.related('wf_info_for') - try: - view = session.vreg['views'].select('notif_status_change', session, - rset=rset, row=0) - except RegistryException: - return - comment = entity.printable_value('comment', format='text/plain') - # XXX don't try to wrap rest until we've a proper transformation 
(see - # #103822) - if comment and entity.comment_format != 'text/rest': - comment = normalize_text(comment, 80) - RenderAndSendNotificationView(session, view=view, viewargs={ - 'comment': comment, 'previous_state': entity.previous_state.name, - 'current_state': entity.new_state.name}) - - -class RelationChangeHook(Hook): - events = ('before_add_relation', 'after_add_relation', - 'before_delete_relation', 'after_delete_relation') - accepts = ('Any',) - def call(self, session, fromeid, rtype, toeid): - """if a notification view is defined for the event, send notification - email defined by the view - """ - rset = session.eid_rset(fromeid) - vid = 'notif_%s_%s' % (self.event, rtype) - try: - view = session.vreg['views'].select(vid, session, rset=rset, row=0) - except RegistryException: - return - RenderAndSendNotificationView(session, view=view) - - -class EntityChangeHook(Hook): - events = ('after_add_entity', - 'after_update_entity') - accepts = ('Any',) - def call(self, session, entity): - """if a notification view is defined for the event, send notification - email defined by the view - """ - rset = entity.as_rset() - vid = 'notif_%s' % self.event - try: - view = session.vreg['views'].select(vid, session, rset=rset, row=0) - except RegistryException: - return - RenderAndSendNotificationView(session, view=view) - -class EntityUpdateHook(Hook): - events = ('before_update_entity',) - accepts = () - skip_attrs = set() - - def call(self, session, entity): - if entity.eid in session.transaction_data.get('neweids', ()): - return # entity is being created - if session.is_super_session: - return # ignore changes triggered by hooks - # then compute changes - changes = session.transaction_data.setdefault('changes', {}) - thisentitychanges = changes.setdefault(entity.eid, set()) - attrs = [k for k in entity.edited_attributes if not k in self.skip_attrs] - if not attrs: - return - rqlsel, rqlrestr = [], ['X eid %(x)s'] - for i, attr in enumerate(attrs): - var = chr(65+i) - rqlsel.append(var) - rqlrestr.append('X %s %s' % (attr, var)) - rql = 'Any %s WHERE %s' % (','.join(rqlsel), ','.join(rqlrestr)) - rset = session.execute(rql, {'x': entity.eid}, 'x') - for i, attr in enumerate(attrs): - oldvalue = rset[0][i] - newvalue = entity[attr] - if oldvalue != newvalue: - thisentitychanges.add((attr, oldvalue, newvalue)) - if thisentitychanges: - EntityUpdatedNotificationOp(session) - # abstract or deactivated notification views and mixin ######################## class NotificationView(NotificationView): """overriden to delay actual sending of mails to a commit operation by default """ - def send_on_commit(self, recipients, msg): - SendMailOp(self.req, recipients=recipients, msg=msg) + SendMailOp(self._cw, recipients=recipients, msg=msg) send = send_on_commit + class StatusChangeMixIn(object): - id = 'notif_status_change' + __regid__ = 'notif_status_change' msgid_timestamp = True message = _('status changed') content = _(""" @@ -208,7 +88,7 @@ override call) """ __abstract__ = True - id = 'notif_after_add_entity' + __regid__ = 'notif_after_add_entity' msgid_timestamp = False message = _('new') content = """ @@ -220,7 +100,7 @@ """ def context(self, **kwargs): - entity = self.entity(self.row or 0, self.col or 0) + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) content = entity.printable_value(self.content_attr, format='text/plain') if content: contentformat = getattr(entity, self.content_attr + '_format', @@ -232,17 +112,17 @@ return super(ContentAddedView, self).context(content=content, 
**kwargs) def subject(self): - entity = self.entity(self.row or 0, self.col or 0) - return u'%s #%s (%s)' % (self.req.__('New %s' % entity.e_schema), + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + return u'%s #%s (%s)' % (self._cw.__('New %s' % entity.e_schema), entity.eid, self.user_data['login']) -NormalizedTextView = class_renamed('NormalizedTextView', ContentAddedView) def format_value(value): if isinstance(value, unicode): return u'"%s"' % value return value + class EntityUpdatedNotificationView(NotificationView): """abstract class for notification on entity/relation @@ -266,17 +146,19 @@ def context(self, **kwargs): context = super(EntityUpdatedNotificationView, self).context(**kwargs) - changes = self.req.transaction_data['changes'][self.rset[0][0]] - _ = self.req._ + changes = self._cw.transaction_data['changes'][self.cw_rset[0][0]] + _ = self._cw._ formatted_changes = [] + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) for attr, oldvalue, newvalue in sorted(changes): # check current user has permission to see the attribute - rschema = self.vreg.schema[attr] + rschema = self._cw.vreg.schema[attr] if rschema.final: - if not rschema.has_perm(self.req, 'read', eid=self.rset[0][0]): + rdef = entity.e_schema.rdef(rschema) + if not rdef.has_perm(self._cw, 'read', eid=self.cw_rset[0][0]): continue # XXX suppose it's a subject relation... - elif not rschema.has_perm(self.req, 'read', fromeid=self.rset[0][0]): + elif not rschema.has_perm(self._cw, 'read', fromeid=self.cw_rset[0][0]): # XXX toeid continue if attr in self.no_detailed_change_attrs: msg = _('%s updated') % _(attr) @@ -296,6 +178,16 @@ return context def subject(self): - entity = self.entity(self.row or 0, self.col or 0) - return u'%s #%s (%s)' % (self.req.__('Updated %s' % entity.e_schema), + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + return u'%s #%s (%s)' % (self._cw.__('Updated %s' % entity.e_schema), entity.eid, self.user_data['login']) + + +from logilab.common.deprecation import class_renamed, class_moved, deprecated +from cubicweb.hooks.notification import RenderAndSendNotificationView +from cubicweb.mail import parse_message_id + +NormalizedTextView = class_renamed('NormalizedTextView', ContentAddedView) +RenderAndSendNotificationView = class_moved(RenderAndSendNotificationView) +parse_message_id = deprecated('parse_message_id is now defined in cubicweb.mail')(parse_message_id) + diff -r 15d541321a8c -r 74c1597f8a82 sobjects/supervising.py --- a/sobjects/supervising.py Wed Jan 20 10:13:02 2010 +0100 +++ b/sobjects/supervising.py Wed Jan 20 10:13:45 2010 +0100 @@ -12,59 +12,8 @@ from cubicweb.selectors import none_rset from cubicweb.schema import display_name from cubicweb.view import Component -from cubicweb.common.mail import format_mail -from cubicweb.server.hooksmanager import Hook -from cubicweb.server.hookhelper import SendMailOp - - -class SomethingChangedHook(Hook): - events = ('before_add_relation', 'before_delete_relation', - 'after_add_entity', 'before_update_entity') - accepts = ('Any',) - - def call(self, session, *args): - if session.is_super_session or session.repo.config.repairing: - return # ignore changes triggered by hooks or maintainance shell - dest = self.config['supervising-addrs'] - if not dest: # no supervisors, don't do this for nothing... 
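The notification components above follow one mechanical renaming that recurs through the rest of this patch: id becomes __regid__, self.req becomes self._cw, self.rset/self.row/self.col become self.cw_rset/self.cw_row/self.cw_col, self.config is reached through self._cw.vreg.config, and self.entity(row, col) becomes self.cw_rset.get_entity(row, col). A condensed sketch of the new style, mirroring the subject() methods rewritten above (the class name and regid are invented):

    from cubicweb.mail import NotificationView

    class ExampleNotifView(NotificationView):
        __regid__ = 'notif_example'          # was: id = 'notif_example'

        def subject(self):
            # was: entity = self.entity(self.row or 0, self.col or 0)
            entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
            # was: self.req.__(...)
            return u'%s #%s (%s)' % (self._cw.__('Updated %s' % entity.e_schema),
                                     entity.eid, self.user_data['login'])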
- return - self.session = session - if self._call(*args): - SupervisionMailOp(session) - - def _call(self, *args): - if self._event() == 'update_entity': - if args[0].eid in self.session.transaction_data.get('neweids', ()): - return False - if args[0].e_schema == 'CWUser': - updated = set(args[0].iterkeys()) - if not (updated - frozenset(('eid', 'modification_date', - 'last_login_time'))): - # don't record last_login_time update which are done - # automatically at login time - return False - self.session.transaction_data.setdefault('pendingchanges', []).append( - (self._event(), args)) - return True - - def _event(self): - return self.event.split('_', 1)[1] - - -class EntityDeleteHook(SomethingChangedHook): - events = ('before_delete_entity',) - - def _call(self, eid): - entity = self.session.entity_from_eid(eid) - try: - title = entity.dc_title() - except: - # may raise an error during deletion process, for instance due to - # missing required relation - title = '#%s' % eid - self.session.transaction_data.setdefault('pendingchanges', []).append( - ('delete_entity', (eid, str(entity.e_schema), title))) - return True +from cubicweb.mail import format_mail +from cubicweb.server.hook import SendMailOp def filter_changes(changes): @@ -81,7 +30,7 @@ for change in changes[:]: event, changedescr = change if event == 'add_entity': - entity = changedescr[0] + entity = changedescr.entity added.add(entity.eid) if entity.e_schema == 'TrInfo': changes.remove(change) @@ -95,7 +44,7 @@ index.setdefault(event, set()).add(change) for key in ('delete_relation', 'add_relation'): for change in index.get(key, {}).copy(): - if change[1][1] == 'in_state': + if change[1].rtype == 'in_state': index[key].remove(change) # filter changes for eid in added: @@ -104,26 +53,24 @@ changedescr = change[1] # skip meta-relations which are set automatically # XXX generate list below using rtags (category = 'generated') - if changedescr[1] in ('created_by', 'owned_by', 'is', 'is_instance_of', + if changedescr.rtype in ('created_by', 'owned_by', 'is', 'is_instance_of', 'from_state', 'to_state', 'by_transition', 'wf_info_for') \ - and changedescr[0] == eid: + and changedescr.eidfrom == eid: index['add_relation'].remove(change) - except KeyError: break for eid in deleted: try: for change in index['delete_relation'].copy(): - fromeid, rtype, toeid = change[1] - if fromeid == eid: + if change[1].eidfrom == eid: index['delete_relation'].remove(change) - elif toeid == eid: + elif change[1].eidto == eid: index['delete_relation'].remove(change) - if rtype == 'wf_info_for': - for change in index['delete_entity'].copy(): - if change[1][0] == fromeid: - index['delete_entity'].remove(change) + if change[1].rtype == 'wf_info_for': + for change_ in index['delete_entity'].copy(): + if change_[1].eidfrom == change[1].eidfrom: + index['delete_entity'].remove(change_) except KeyError: break for change in changes: @@ -135,22 +82,22 @@ class SupervisionEmailView(Component): """view implementing the email API for data changes supervision notification """ + __regid__ = 'supervision_notif' __select__ = none_rset() - id = 'supervision_notif' def recipients(self): - return self.config['supervising-addrs'] + return self._cw.vreg.config['supervising-addrs'] def subject(self): - return self.req._('[%s supervision] changes summary') % self.config.appid + return self._cw._('[%s supervision] changes summary') % self._cw.vreg.config.appid def call(self, changes): - user = self.req.actual_session().user - self.w(self.req._('user %s has made the following 
change(s):\n\n') + user = self._cw.actual_session().user + self.w(self._cw._('user %s has made the following change(s):\n\n') % user.login) for event, changedescr in filter_changes(changes): self.w(u'* ') - getattr(self, event)(*changedescr) + getattr(self, event)(changedescr) self.w(u'\n\n') def _entity_context(self, entity): @@ -158,32 +105,32 @@ 'etype': entity.dc_type().lower(), 'title': entity.dc_title()} - def add_entity(self, entity): - msg = self.req._('added %(etype)s #%(eid)s (%(title)s)') - self.w(u'%s\n' % (msg % self._entity_context(entity))) - self.w(u' %s' % entity.absolute_url()) + def add_entity(self, changedescr): + msg = self._cw._('added %(etype)s #%(eid)s (%(title)s)') + self.w(u'%s\n' % (msg % self._entity_context(changedescr.entity))) + self.w(u' %s' % changedescr.entity.absolute_url()) - def update_entity(self, entity): - msg = self.req._('updated %(etype)s #%(eid)s (%(title)s)') - self.w(u'%s\n' % (msg % self._entity_context(entity))) + def update_entity(self, changedescr): + msg = self._cw._('updated %(etype)s #%(eid)s (%(title)s)') + self.w(u'%s\n' % (msg % self._entity_context(changedescr.entity))) # XXX print changes - self.w(u' %s' % entity.absolute_url()) + self.w(u' %s' % changedescr.entity.absolute_url()) - def delete_entity(self, eid, etype, title): - msg = self.req._('deleted %(etype)s #%(eid)s (%(title)s)') - etype = display_name(self.req, etype).lower() + def delete_entity(self, (eid, etype, title)): + msg = self._cw._('deleted %(etype)s #%(eid)s (%(title)s)') + etype = display_name(self._cw, etype).lower() self.w(msg % locals()) - def change_state(self, entity, fromstate, tostate): - msg = self.req._('changed state of %(etype)s #%(eid)s (%(title)s)') + def change_state(self, (entity, fromstate, tostate)): + msg = self._cw._('changed state of %(etype)s #%(eid)s (%(title)s)') self.w(u'%s\n' % (msg % self._entity_context(entity))) self.w(_(' from state %(fromstate)s to state %(tostate)s\n' % {'fromstate': _(fromstate.name), 'tostate': _(tostate.name)})) self.w(u' %s' % entity.absolute_url()) - def _relation_context(self, fromeid, rtype, toeid): - _ = self.req._ - session = self.req.actual_session() + def _relation_context(self, changedescr): + _ = self._cw._ + session = self._cw.actual_session() def describe(eid): try: return _(session.describe(eid)[0]).lower() @@ -191,19 +138,20 @@ # may occurs when an entity has been deleted from an external # source and we're cleaning its relation return _('unknown external entity') + eidfrom, rtype, eidto = changedescr.eidfrom, changedescr.rtype, changedescr.eidto return {'rtype': _(rtype), - 'fromeid': fromeid, - 'frometype': describe(fromeid), - 'toeid': toeid, - 'toetype': describe(toeid)} + 'eidfrom': eidfrom, + 'frometype': describe(eidfrom), + 'eidto': eidto, + 'toetype': describe(eidto)} - def add_relation(self, fromeid, rtype, toeid): - msg = self.req._('added relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s #%(toeid)s') - self.w(msg % self._relation_context(fromeid, rtype, toeid)) + def add_relation(self, changedescr): + msg = self._cw._('added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #%(eidto)s') + self.w(msg % self._relation_context(changedescr)) - def delete_relation(self, fromeid, rtype, toeid): - msg = self.req._('deleted relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s #%(toeid)s') - self.w(msg % self._relation_context(fromeid, rtype, toeid)) + def delete_relation(self, changedescr): + msg = self._cw._('deleted relation %(rtype)s from 
%(frometype)s #%(eidfrom)s to %(toetype)s #%(eidto)s') + self.w(msg % self._relation_context(changedescr)) class SupervisionMailOp(SendMailOp): diff -r 15d541321a8c -r 74c1597f8a82 sobjects/test/unittest_email.py --- a/sobjects/test/unittest_email.py Wed Jan 20 10:13:02 2010 +0100 +++ b/sobjects/test/unittest_email.py Wed Jan 20 10:13:45 2010 +0100 @@ -5,10 +5,11 @@ :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ + from cubicweb import Unauthorized -from cubicweb.devtools.apptest import EnvBasedTC +from cubicweb.devtools.testlib import CubicWebTC -class EmailAddressHooksTC(EnvBasedTC): +class EmailAddressHooksTC(CubicWebTC): def test_use_email_set_primary_email(self): self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U use_email X WHERE U login "admin"') diff -r 15d541321a8c -r 74c1597f8a82 sobjects/test/unittest_hooks.py --- a/sobjects/test/unittest_hooks.py Wed Jan 20 10:13:02 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,37 +0,0 @@ -""" - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -from logilab.common.testlib import unittest_main -from cubicweb.devtools.apptest import EnvBasedTC - -class HooksTC(EnvBasedTC): - - def test_euser_login_stripped(self): - u = self.create_user(' joe ') - tname = self.execute('Any L WHERE E login L, E eid %(e)s', - {'e': u.eid})[0][0] - self.assertEquals(tname, 'joe') - self.execute('SET X login " jijoe " WHERE X eid %(x)s', {'x': u.eid}) - tname = self.execute('Any L WHERE E login L, E eid %(e)s', - {'e': u.eid})[0][0] - self.assertEquals(tname, 'jijoe') - - - def test_auto_delete_bookmarks(self): - beid = self.execute('INSERT Bookmark X: X title "hop", X path "view", X bookmarked_by U ' - 'WHERE U login "admin"')[0][0] - self.execute('SET X bookmarked_by U WHERE U login "anon"') - self.commit() - self.execute('DELETE X bookmarked_by U WHERE U login "admin"') - self.commit() - self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x')) - self.execute('DELETE X bookmarked_by U WHERE U login "anon"') - self.commit() - self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x')) - -if __name__ == '__main__': - unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 sobjects/test/unittest_notification.py --- a/sobjects/test/unittest_notification.py Wed Jan 20 10:13:02 2010 +0100 +++ b/sobjects/test/unittest_notification.py Wed Jan 20 10:13:45 2010 +0100 @@ -9,9 +9,9 @@ from socket import gethostname from logilab.common.testlib import unittest_main, TestCase -from cubicweb.devtools.apptest import EnvBasedTC, MAILBOX +from cubicweb.devtools.testlib import CubicWebTC, MAILBOX -from cubicweb.common.mail import construct_message_id, parse_message_id +from cubicweb.mail import construct_message_id, parse_message_id class MessageIdTC(TestCase): def test_base(self): @@ -48,7 +48,7 @@ self.assertNotEquals(msgid1, '<@testapp.%s>' % gethostname()) -class RecipientsFinderTC(EnvBasedTC): +class RecipientsFinderTC(CubicWebTC): def test(self): urset = self.execute('CWUser X WHERE X login "admin"') self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X ' @@ -67,12 +67,11 @@ self.assertEquals(finder.recipients(), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')]) -class 
StatusChangeViewsTC(EnvBasedTC): +class StatusChangeViewsTC(CubicWebTC): def test_status_change_view(self): - req = self.session() - u = self.create_user('toto', req=req)#, commit=False) XXX in cw 3.6, and remove set_pool - req.set_pool() + req = self.request() + u = self.create_user('toto', req=req) u.fire_transition('deactivate', comment=u'yeah') self.failIf(MAILBOX) self.commit() diff -r 15d541321a8c -r 74c1597f8a82 sobjects/test/unittest_supervising.py --- a/sobjects/test/unittest_supervising.py Wed Jan 20 10:13:02 2010 +0100 +++ b/sobjects/test/unittest_supervising.py Wed Jan 20 10:13:45 2010 +0100 @@ -9,24 +9,25 @@ import re from logilab.common.testlib import unittest_main -from cubicweb.devtools.apptest import EnvBasedTC +from cubicweb.devtools.testlib import CubicWebTC from cubicweb.sobjects.supervising import SendMailOp, SupervisionMailOp -class SupervisingTC(EnvBasedTC): +class SupervisingTC(CubicWebTC): def setup_database(self): - self.add_entity('Card', title=u"une news !", content=u"cubicweb c'est beau") - self.add_entity('Card', title=u"une autre news !", content=u"cubicweb c'est beau") - self.add_entity('Bookmark', title=u"un signet !", path=u"view?vid=index") - self.add_entity('Comment', content=u"Yo !") + req = self.request() + req.create_entity('Card', title=u"une news !", content=u"cubicweb c'est beau") + req.create_entity('Card', title=u"une autre news !", content=u"cubicweb c'est beau") + req.create_entity('Bookmark', title=u"un signet !", path=u"view?vid=index") + req.create_entity('Comment', content=u"Yo !") self.execute('SET C comments B WHERE B title "une autre news !", C content "Yo !"') self.vreg.config.global_set_option('supervising-addrs', 'test@logilab.fr') def test_supervision(self): - session = self.session() + session = self.session # do some modification user = self.execute('INSERT CWUser X: X login "toto", X upassword "sosafe", X in_group G ' 'WHERE G name "users"').get_entity(0, 0) @@ -88,7 +89,7 @@ data) def test_nonregr1(self): - session = self.session() + session = self.session # do some unlogged modification self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid}, 'x') self.commit() # no crash diff -r 15d541321a8c -r 74c1597f8a82 sobjects/textparsers.py --- a/sobjects/textparsers.py Wed Jan 20 10:13:02 2010 +0100 +++ b/sobjects/textparsers.py Wed Jan 20 10:13:45 2010 +0100 @@ -21,11 +21,11 @@ """analyze and extract information from plain text by calling registered text parsers """ - id = 'textanalyzer' + __regid__ = 'textanalyzer' def parse(self, caller, text): - for parsercls in self.req.vreg['components'].get('textparser', ()): - parsercls(self.req).parse(caller, text) + for parsercls in self._cw.vreg['components'].get('textparser', ()): + parsercls(self._cw).parse(caller, text) class TextParser(Component): @@ -36,7 +36,7 @@ method on the caller. """ - id = 'textparser' + __regid__ = 'textparser' __abstract__ = True def parse(self, caller, text): @@ -53,7 +53,7 @@ def parse(self, caller, text): for trname, eid in self.instr_rgx.findall(text): try: - entity = self.req.entity_from_eid(typed_eid(eid)) + entity = self._cw.entity_from_eid(typed_eid(eid)) except UnknownEid: self.error("can't get entity with eid %s", eid) continue diff -r 15d541321a8c -r 74c1597f8a82 tags.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tags.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,49 @@ +"""helper classes to generate simple (X)HTML tags + +:organization: Logilab +:copyright: 2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
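The test modules touched here (unittest_email, unittest_notification and unittest_supervising above, unittest_dbapi and unittest_entity below) all migrate from EnvBasedTC/RepositoryBasedTC to CubicWebTC, trading the implicit helpers for an explicit request. A minimal sketch of the recurring pattern (class name and entity values are illustrative only):

    from cubicweb.devtools.testlib import CubicWebTC

    class ExampleTC(CubicWebTC):                      # was: EnvBasedTC / RepositoryBasedTC
        def setup_database(self):
            req = self.request()                      # explicit request object
            req.create_entity('Card', title=u'hop')   # was: self.add_entity('Card', title=u'hop')
            self.create_user('bob', req=req)          # create_user now returns the user entity

        def test_something(self):
            session = self.session                    # was: self.session() / self.current_session()
            self.failUnless(session.user)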
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +from cubicweb.uilib import simple_sgml_tag, sgml_attributes + +class tag(object): + def __init__(self, name, escapecontent=True): + self.name = name + self.escapecontent = escapecontent + + def __call__(self, __content=None, **attrs): + attrs.setdefault('escapecontent', self.escapecontent) + return simple_sgml_tag(self.name, __content, **attrs) + +button = tag('button') +input = tag('input') +textarea = tag('textarea') +a = tag('a') +span = tag('span') +div = tag('div', False) +img = tag('img') +label = tag('label') +option = tag('option') +h1 = tag('h1') +h2 = tag('h2') +h3 = tag('h3') +h4 = tag('h4') +h5 = tag('h5') +tr = tag('tr') +th = tag('th') +td = tag('td') + +def select(name, id=None, multiple=False, options=[], **attrs): + if multiple: + attrs['multiple'] = 'multiple' + if id: + attrs['id'] = id + attrs['name'] = name + html = [u'<select %s>' % sgml_attributes(attrs)] + html += options + html.append(u'</select>') + return u'\n'.join(html) + diff -r 15d541321a8c -r 74c1597f8a82 test/data/entities.py --- a/test/data/entities.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/data/entities.py Wed Jan 20 10:13:45 2010 +0100 @@ -8,15 +8,15 @@ from cubicweb.entities import AnyEntity, fetch_config class Societe(AnyEntity): - id = 'Societe' + __regid__ = 'Societe' fetch_attrs = ('nom',) class Personne(Societe): """customized class forne Person entities""" - id = 'Personne' + __regid__ = 'Personne' fetch_attrs, fetch_order = fetch_config(['nom', 'prenom']) rest_attr = 'nom' class Note(AnyEntity): - id = 'Note' + __regid__ = 'Note' diff -r 15d541321a8c -r 74c1597f8a82 test/data/erqlexpr_on_ertype.py --- a/test/data/erqlexpr_on_ertype.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/data/erqlexpr_on_ertype.py Wed Jan 20 10:13:45 2010 +0100 @@ -9,7 +9,7 @@ from cubicweb.schema import ERQLExpression class ToTo(EntityType): - permissions = { + __permissions__ = { 'read': ('managers',), 'add': ('managers',), 'update': ('managers',), @@ -18,7 +18,7 @@ toto = SubjectRelation('TuTu') class TuTu(EntityType): - permissions = { + __permissions__ = { 'read': ('managers',), 'add': ('managers',), 'update': ('managers',), @@ -26,7 +26,7 @@ } class toto(RelationType): - permissions = { + __permissions__ = { 'read': ('managers', ), 'add': ('managers', ERQLExpression('S bla Y'),), 'delete': ('managers',), diff -r 15d541321a8c -r 74c1597f8a82 test/data/migration/0.0.3_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/migration/0.0.3_Any.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,8 @@ +""" + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +coucou diff -r 15d541321a8c -r 74c1597f8a82 test/data/migration/0.0.4_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/migration/0.0.4_Any.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,8 @@ +""" + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
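A short usage sketch for the tag helpers defined in tags.py above (the rendered strings are approximate and assume simple_sgml_tag's usual attribute rendering and content escaping):

    from cubicweb import tags

    tags.a(u'home', href=u'http://example.org/')
    # -> roughly u'<a href="http://example.org/">home</a>'
    tags.div(u'<em>raw</em>')   # div is declared with escapecontent=False, content is kept as-is
    tags.select(u'vid', id=u'vid-select',
                options=[tags.option(u'index', value=u'index')])
    # -> a <select name="vid" id="vid-select"> element wrapping the given <option> strings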
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +coucou diff -r 15d541321a8c -r 74c1597f8a82 test/data/migration/0.1.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/migration/0.1.0_Any.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,8 @@ +""" + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +coucou diff -r 15d541321a8c -r 74c1597f8a82 test/data/migration/0.1.0_common.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/migration/0.1.0_common.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,7 @@ +"""common to all configuration + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" diff -r 15d541321a8c -r 74c1597f8a82 test/data/migration/0.1.0_repository.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/migration/0.1.0_repository.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,7 @@ +"""repository specific + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" diff -r 15d541321a8c -r 74c1597f8a82 test/data/migration/0.1.0_web.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/migration/0.1.0_web.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,7 @@ +"""web only + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" diff -r 15d541321a8c -r 74c1597f8a82 test/data/migration/0.1.2_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/migration/0.1.2_Any.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,8 @@ +""" + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
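The migration fixtures added above and below follow the usual cube layout: scripts named <version>_<mode>.py (mode being Any, common, repository or web, as the docstrings above indicate) are executed when upgrading to <version>, and the depends.map in the next hunk maps cube versions to the cubicweb version they require. As a hedged illustration only (the version number is invented and the arguments are merely illustrative, though add_attribute and sync_schema_props_perms are standard migration commands), a real script could look like:

    # migration/0.2.0_Any.py -- run in the migration shell, where commands are injected as globals
    add_attribute('Personne', 'surnom')     # assumes the cube schema now defines this attribute
    sync_schema_props_perms('travaille')    # resync properties/permissions of a relation type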
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +coucou diff -r 15d541321a8c -r 74c1597f8a82 test/data/migration/depends.map --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/migration/depends.map Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,5 @@ +0.0.2: 2.3.0 +0.0.3: 2.4.0 +# missing 0.0.4 entry, that's alright +0.1.0: 2.6.0 +0.1.2: 2.10.0 diff -r 15d541321a8c -r 74c1597f8a82 test/data/rewrite/schema.py --- a/test/data/rewrite/schema.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/data/rewrite/schema.py Wed Jan 20 10:13:45 2010 +0100 @@ -2,7 +2,7 @@ from cubicweb.schema import ERQLExpression class Affaire(EntityType): - permissions = { + __permissions__ = { 'read': ('managers', ERQLExpression('X owned_by U'), ERQLExpression('X concerne S?, S owned_by U')), 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), @@ -15,7 +15,7 @@ class Societe(EntityType): - permissions = { + __permissions__ = { 'read': ('managers', 'users', 'guests'), 'update': ('managers', 'owners', ERQLExpression('U login L, X nom L')), 'delete': ('managers', 'owners', ERQLExpression('U login L, X nom L')), diff -r 15d541321a8c -r 74c1597f8a82 test/data/rqlexpr_on_ertype_read.py --- a/test/data/rqlexpr_on_ertype_read.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/data/rqlexpr_on_ertype_read.py Wed Jan 20 10:13:45 2010 +0100 @@ -9,7 +9,7 @@ from cubicweb.schema import RRQLExpression class ToTo(EntityType): - permissions = { + __permissions__ = { 'read': ('managers',), 'add': ('managers',), 'update': ('managers',), @@ -18,7 +18,7 @@ toto = SubjectRelation('TuTu') class TuTu(EntityType): - permissions = { + __permissions__ = { 'read': ('managers',), 'add': ('managers',), 'update': ('managers',), @@ -26,7 +26,7 @@ } class toto(RelationType): - permissions = { + __permissions__ = { 'read': ('managers', RRQLExpression('S bla Y'), ), 'add': ('managers',), 'delete': ('managers',), diff -r 15d541321a8c -r 74c1597f8a82 test/data/rrqlexpr_on_attr.py --- a/test/data/rrqlexpr_on_attr.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/data/rrqlexpr_on_attr.py Wed Jan 20 10:13:45 2010 +0100 @@ -9,7 +9,7 @@ from cubicweb.schema import RRQLExpression class ToTo(EntityType): - permissions = { + __permissions__ = { 'read': ('managers',), 'add': ('managers',), 'update': ('managers',), @@ -18,7 +18,7 @@ attr = String() class attr(RelationType): - permissions = { + __permissions__ = { 'read': ('managers', ), 'add': ('managers', RRQLExpression('S bla Y'),), 'delete': ('managers',), diff -r 15d541321a8c -r 74c1597f8a82 test/data/rrqlexpr_on_eetype.py --- a/test/data/rrqlexpr_on_eetype.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/data/rrqlexpr_on_eetype.py Wed Jan 20 10:13:45 2010 +0100 @@ -9,7 +9,7 @@ from cubicweb.schema import RRQLExpression class ToTo(EntityType): - permissions = { + __permissions__ = { 'read': ('managers', RRQLExpression('S bla Y'),), 'add': ('managers',), 'update': ('managers',), diff -r 15d541321a8c -r 74c1597f8a82 test/data/server_migration/2.10.2_Any.sql diff -r 15d541321a8c -r 74c1597f8a82 test/data/server_migration/2.5.0_Any.sql diff -r 15d541321a8c -r 74c1597f8a82 test/data/server_migration/2.6.0_Any.sql diff -r 15d541321a8c -r 74c1597f8a82 test/data/server_migration/bootstrapmigration_repository.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/server_migration/bootstrapmigration_repository.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,7 @@ +"""allways executed before all others in server 
migration + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_cwconfig.py --- a/test/unittest_cwconfig.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/unittest_cwconfig.py Wed Jan 20 10:13:45 2010 +0100 @@ -77,7 +77,7 @@ def test_vregistry_path(self): self.assertEquals([unabsolutize(p) for p in self.config.vregistry_path()], - ['entities', 'web/views', 'sobjects', + ['entities', 'web/views', 'sobjects', 'hooks', 'file/entities.py', 'file/views', 'file/hooks.py', 'email/entities.py', 'email/views', 'email/hooks.py', 'test/data/entities.py']) diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_dbapi.py --- a/test/unittest_dbapi.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/unittest_dbapi.py Wed Jan 20 10:13:45 2010 +0100 @@ -7,24 +7,21 @@ """ from cubicweb import ConnectionError from cubicweb.dbapi import ProgrammingError -from cubicweb.devtools.apptest import EnvBasedTC +from cubicweb.devtools.testlib import CubicWebTC -class DBAPITC(EnvBasedTC): - @property - def cnx(self): - return self.login('anon') +class DBAPITC(CubicWebTC): def test_public_repo_api(self): - cnx = self.cnx - self.assertEquals(cnx.get_schema(), self.env.repo.schema) + cnx = self.login('anon') + self.assertEquals(cnx.get_schema(), self.repo.schema) self.assertEquals(cnx.source_defs(), {'system': {'adapter': 'native', 'uri': 'system'}}) self.restore_connection() # proper way to close cnx self.assertRaises(ProgrammingError, cnx.get_schema) self.assertRaises(ProgrammingError, cnx.source_defs) def test_db_api(self): - cnx = self.cnx + cnx = self.login('anon') self.assertEquals(cnx.rollback(), None) self.assertEquals(cnx.commit(), None) self.restore_connection() # proper way to close cnx @@ -34,7 +31,7 @@ self.assertRaises(ProgrammingError, cnx.close) def test_api(self): - cnx = self.cnx + cnx = self.login('anon') self.assertEquals(cnx.user(None).login, 'anon') self.assertEquals(cnx.describe(1), (u'CWGroup', u'system', None)) self.restore_connection() # proper way to close cnx @@ -42,7 +39,7 @@ self.assertRaises(ConnectionError, cnx.describe, 1) def test_session_data_api(self): - cnx = self.cnx + cnx = self.login('anon') self.assertEquals(cnx.get_session_data('data'), None) self.assertEquals(cnx.session_data(), {}) cnx.set_session_data('data', 4) @@ -57,7 +54,7 @@ self.assertEquals(cnx.session_data(), {'data': 4}) def test_shared_data_api(self): - cnx = self.cnx + cnx = self.login('anon') self.assertEquals(cnx.get_shared_data('data'), None) cnx.set_shared_data('data', 4) self.assertEquals(cnx.get_shared_data('data'), 4) @@ -71,19 +68,6 @@ self.assertRaises(ConnectionError, cnx.set_shared_data, 'data', 0) self.assertRaises(ConnectionError, cnx.get_shared_data, 'data') - -# class DBAPICursorTC(EnvBasedTC): - -# @property -# def cursor(self): -# return self.env.cnx.cursor() - -# def test_api(self): -# cu = self.cursor -# self.assertEquals(cu.describe(1), (u'CWGroup', u'system', None)) -# #cu.close() -# #self.assertRaises(ConnectionError, cu.describe, 1) - if __name__ == '__main__': from logilab.common.testlib import unittest_main unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_entity.py --- a/test/unittest_entity.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/unittest_entity.py Wed Jan 20 10:13:45 2010 +0100 @@ -10,32 +10,25 @@ from datetime import datetime from cubicweb 
import Binary, Unauthorized -from cubicweb.devtools.apptest import EnvBasedTC -from cubicweb.common.mttransforms import HAS_TAL +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.mttransforms import HAS_TAL from cubicweb.entities import fetch_config -class EntityTC(EnvBasedTC): - -## def setup_database(self): -## self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien') -## self.add_entity('Task', title=u'fait ca !', description=u'et plus vite', start=now()) -## self.add_entity('Tag', name=u'x') -## self.add_entity('Link', title=u'perdu', url=u'http://www.perdu.com', -## embed=False) +class EntityTC(CubicWebTC): def test_boolean_value(self): - e = self.etype_instance('CWUser') + e = self.vreg['etypes'].etype_class('CWUser')(self.request()) self.failUnless(e) def test_yams_inheritance(self): from entities import Note - e = self.etype_instance('SubNote') + e = self.vreg['etypes'].etype_class('SubNote')(self.request()) self.assertIsInstance(e, Note) - e2 = self.etype_instance('SubNote') + e2 = self.vreg['etypes'].etype_class('SubNote')(self.request()) self.assertIs(e.__class__, e2.__class__) def test_has_eid(self): - e = self.etype_instance('CWUser') + e = self.vreg['etypes'].etype_class('CWUser')(self.request()) self.assertEquals(e.eid, None) self.assertEquals(e.has_eid(), False) e.eid = 'X' @@ -46,13 +39,14 @@ self.assertEquals(e.has_eid(), True) def test_copy(self): - self.add_entity('Tag', name=u'x') - p = self.add_entity('Personne', nom=u'toto') - oe = self.add_entity('Note', type=u'x') + req = self.request() + req.create_entity('Tag', name=u'x') + p = req.create_entity('Personne', nom=u'toto') + oe = req.create_entity('Note', type=u'x') self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', {'t': oe.eid, 'u': p.eid}, ('t','u')) self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}, 'x') - e = self.add_entity('Note', type=u'z') + e = req.create_entity('Note', type=u'z') e.copy_relations(oe.eid) self.assertEquals(len(e.ecrit_par), 1) self.assertEquals(e.ecrit_par[0].eid, p.eid) @@ -61,12 +55,13 @@ self.assertEquals(len(e.created_by), 0) def test_copy_with_nonmeta_composite_inlined(self): - p = self.add_entity('Personne', nom=u'toto') - oe = self.add_entity('Note', type=u'x') - self.schema['ecrit_par'].set_rproperty('Note', 'Personne', 'composite', 'subject') + req = self.request() + p = req.create_entity('Personne', nom=u'toto') + oe = req.create_entity('Note', type=u'x') + self.schema['ecrit_par'].rdef('Note', 'Personne').composite = 'subject' self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', {'t': oe.eid, 'u': p.eid}, ('t','u')) - e = self.add_entity('Note', type=u'z') + e = req.create_entity('Note', type=u'z') e.copy_relations(oe.eid) self.failIf(e.ecrit_par) self.failUnless(oe.ecrit_par) @@ -102,7 +97,7 @@ user = self.entity('Any X WHERE X eid %(x)s', {'x':self.user().eid}, 'x') adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] self.commit() - self.assertEquals(user._related_cache.keys(), []) + self.assertEquals(user._related_cache, {}) email = user.primary_email[0] self.assertEquals(sorted(user._related_cache), ['primary_email_subject']) self.assertEquals(email._related_cache.keys(), ['primary_email_object']) @@ -112,9 +107,10 @@ self.failIf('in_group_subject' in group._related_cache, group._related_cache.keys()) def test_related_limit(self): - p = self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien') + req = self.request() + p = 
req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') for tag in u'abcd': - self.add_entity('Tag', name=tag) + req.create_entity('Tag', name=tag) self.execute('SET X tags Y WHERE X is Tag, Y is Personne') self.assertEquals(len(p.related('tags', 'object', limit=2)), 2) self.assertEquals(len(p.related('tags', 'object')), 4) @@ -127,10 +123,10 @@ Note = self.vreg['etypes'].etype_class('Note') peschema = Personne.e_schema seschema = Societe.e_schema - peschema.subjrels['travaille'].set_rproperty(peschema, seschema, 'cardinality', '1*') - peschema.subjrels['connait'].set_rproperty(peschema, peschema, 'cardinality', '11') - peschema.subjrels['evaluee'].set_rproperty(peschema, Note.e_schema, 'cardinality', '1*') - seschema.subjrels['evaluee'].set_rproperty(seschema, Note.e_schema, 'cardinality', '1*') + peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '1*' + peschema.subjrels['connait'].rdef(peschema, peschema).cardinality = '11' + peschema.subjrels['evaluee'].rdef(peschema, Note.e_schema).cardinality = '1*' + seschema.subjrels['evaluee'].rdef(seschema, Note.e_schema).cardinality = '1*' # testing basic fetch_attrs attribute self.assertEquals(Personne.fetch_rql(user), 'Any X,AA,AB,AC ORDERBY AA ASC ' @@ -165,13 +161,13 @@ self.assertEquals(Personne.fetch_rql(user), 'Any X,AA,AB ORDERBY AA ASC ' 'WHERE X is Personne, X nom AA, X connait AB?') # testing optional relation - peschema.subjrels['travaille'].set_rproperty(peschema, seschema, 'cardinality', '?*') + peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '?*' Personne.fetch_attrs = ('nom', 'prenom', 'travaille') Societe.fetch_attrs = ('nom',) self.assertEquals(Personne.fetch_rql(user), 'Any X,AA,AB,AC,AD ORDERBY AA ASC WHERE X is Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD') # testing relation with cardinality > 1 - peschema.subjrels['travaille'].set_rproperty(peschema, seschema, 'cardinality', '**') + peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '**' self.assertEquals(Personne.fetch_rql(user), 'Any X,AA,AB ORDERBY AA ASC WHERE X is Personne, X nom AA, X prenom AB') # XXX test unauthorized attribute @@ -185,7 +181,7 @@ self.failUnless(issubclass(self.vreg['etypes'].etype_class('SubNote'), Note)) Personne.fetch_attrs, Personne.fetch_order = fetch_config(('nom', 'type')) Note.fetch_attrs, Note.fetch_order = fetch_config(('type',)) - p = self.add_entity('Personne', nom=u'pouet') + p = self.request().create_entity('Personne', nom=u'pouet') self.assertEquals(p.related_rql('evaluee'), 'Any X,AA,AB ORDERBY AA ASC WHERE E eid %(x)s, E evaluee X, ' 'X type AA, X modification_date AB') @@ -248,8 +244,9 @@ 'A eid %(B)s, EXISTS(S identity A, NOT A in_group C, C name "guests", C is CWGroup)') def test_unrelated_base(self): - p = self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien') - e = self.add_entity('Tag', name=u'x') + req = self.request() + p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') + e = req.create_entity('Tag', name=u'x') related = [r.eid for r in e.tags] self.failUnlessEqual(related, []) unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] @@ -260,9 +257,10 @@ self.failIf(p.eid in unrelated) def test_unrelated_limit(self): - e = self.add_entity('Tag', name=u'x') - self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien') - self.add_entity('Personne', nom=u'thenault', prenom=u'sylvain') + req = self.request() + e = req.create_entity('Tag', name=u'x') + req.create_entity('Personne', nom=u'di mascio', 
prenom=u'adrien') + req.create_entity('Personne', nom=u'thenault', prenom=u'sylvain') self.assertEquals(len(e.unrelated('tags', 'Personne', 'subject', limit=1)), 1) @@ -293,13 +291,13 @@ self.assertEquals([x.address for x in rset.entities()], []) def test_unrelated_new_entity(self): - e = self.etype_instance('CWUser') + e = self.vreg['etypes'].etype_class('CWUser')(self.request()) unrelated = [r[0] for r in e.unrelated('in_group', 'CWGroup', 'subject')] # should be default groups but owners, i.e. managers, users, guests self.assertEquals(len(unrelated), 3) def test_printable_value_string(self): - e = self.add_entity('Card', title=u'rest test', content=u'du :eid:`1:*ReST*`', + e = self.request().create_entity('Card', title=u'rest test', content=u'du :eid:`1:*ReST*`', content_format=u'text/rest') self.assertEquals(e.printable_value('content'), '

    du *ReST*

    \n') @@ -312,7 +310,6 @@ self.assertEquals(e.printable_value('content'), '

    \ndu *texte*\n

    ') e['title'] = 'zou' - #e = self.etype_instance('Task') e['content'] = '''\ a title ======= @@ -333,9 +330,10 @@ def test_printable_value_bytes(self): - e = self.add_entity('File', data=Binary('lambda x: 1'), data_format=u'text/x-python', + req = self.request() + e = req.create_entity('File', data=Binary('lambda x: 1'), data_format=u'text/x-python', data_encoding=u'ascii', data_name=u'toto.py') - from cubicweb.common import mttransforms + from cubicweb import mttransforms if mttransforms.HAS_PYGMENTS_TRANSFORMS: self.assertEquals(e.printable_value('data'), '''
    lambda x: 1
    @@ -348,14 +346,15 @@
     
    ''') - e = self.add_entity('File', data=Binary('*héhéhé*'), data_format=u'text/rest', + e = req.create_entity('File', data=Binary('*héhéhé*'), data_format=u'text/rest', data_encoding=u'utf-8', data_name=u'toto.txt') self.assertEquals(e.printable_value('data'), u'

    héhéhé

    \n') def test_printable_value_bad_html(self): """make sure we don't crash if we try to render invalid XHTML strings""" - e = self.add_entity('Card', title=u'bad html', content=u'
    R&D
    ', + req = self.request() + e = req.create_entity('Card', title=u'bad html', content=u'
    R&D
    ', content_format=u'text/html') tidy = lambda x: x.replace('\n', '') self.assertEquals(tidy(e.printable_value('content')), @@ -388,7 +387,8 @@ def test_printable_value_bad_html_ms(self): self.skip('fix soup2xhtml to handle this test') - e = self.add_entity('Card', title=u'bad html', content=u'
    R&D
    ', + req = self.request() + e = req.create_entity('Card', title=u'bad html', content=u'
    R&D
    ', content_format=u'text/html') tidy = lambda x: x.replace('\n', '') e['content'] = u'
    ms orifice produces weird html
    ' @@ -406,27 +406,28 @@ def test_fulltextindex(self): - e = self.etype_instance('File') + e = self.vreg['etypes'].etype_class('File')(self.request()) e['description'] = 'du html' e['description_format'] = 'text/html' e['data'] = Binary('some data') e['data_name'] = 'an html file' e['data_format'] = 'text/html' e['data_encoding'] = 'ascii' - e.req.transaction_data = {} # XXX req should be a session + e._cw.transaction_data = {} # XXX req should be a session self.assertEquals(set(e.get_words()), set(['an', 'html', 'file', 'du', 'html', 'some', 'data'])) def test_nonregr_relation_cache(self): - p1 = self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien') - p2 = self.add_entity('Personne', nom=u'toto') + req = self.request() + p1 = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') + p2 = req.create_entity('Personne', nom=u'toto') self.execute('SET X evaluee Y WHERE X nom "di mascio", Y nom "toto"') self.assertEquals(p1.evaluee[0].nom, "toto") self.failUnless(not p1.reverse_evaluee) def test_complete_relation(self): - session = self.session() + session = self.session eid = session.unsafe_execute( 'INSERT TrInfo X: X comment "zou", X wf_info_for U, X from_state S1, X to_state S2 ' 'WHERE U login "admin", S1 name "activated", S2 name "deactivated"')[0][0] @@ -446,26 +447,28 @@ self.failUnless(state is samestate) def test_rest_path(self): - note = self.add_entity('Note', type=u'z') + req = self.request() + note = req.create_entity('Note', type=u'z') self.assertEquals(note.rest_path(), 'note/%s' % note.eid) # unique attr - tag = self.add_entity('Tag', name=u'x') + tag = req.create_entity('Tag', name=u'x') self.assertEquals(tag.rest_path(), 'tag/x') # test explicit rest_attr - person = self.add_entity('Personne', prenom=u'john', nom=u'doe') + person = req.create_entity('Personne', prenom=u'john', nom=u'doe') self.assertEquals(person.rest_path(), 'personne/doe') # ambiguity test - person2 = self.add_entity('Personne', prenom=u'remi', nom=u'doe') + person2 = req.create_entity('Personne', prenom=u'remi', nom=u'doe') self.assertEquals(person.rest_path(), 'personne/eid/%s' % person.eid) self.assertEquals(person2.rest_path(), 'personne/eid/%s' % person2.eid) # unique attr with None value (wikiid in this case) - card1 = self.add_entity('Card', title=u'hop') + card1 = req.create_entity('Card', title=u'hop') self.assertEquals(card1.rest_path(), 'card/eid/%s' % card1.eid) - card2 = self.add_entity('Card', title=u'pod', wikiid=u'zob/i') + card2 = req.create_entity('Card', title=u'pod', wikiid=u'zob/i') self.assertEquals(card2.rest_path(), 'card/zob%2Fi') def test_set_attributes(self): - person = self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien') + req = self.request() + person = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') self.assertEquals(person.prenom, u'adrien') self.assertEquals(person.nom, u'di mascio') person.set_attributes(prenom=u'sylvain', nom=u'thénault') @@ -474,7 +477,8 @@ self.assertEquals(person.nom, u'thénault') def test_metainformation_and_external_absolute_url(self): - note = self.add_entity('Note', type=u'z') + req = self.request() + note = req.create_entity('Note', type=u'z') metainf = note.metainformation() self.assertEquals(metainf, {'source': {'adapter': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None}) self.assertEquals(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid) @@ -484,14 +488,16 @@ self.assertEquals(note.absolute_url(), 'http://cubicweb2.com/note/1234') def 
test_absolute_url_empty_field(self): - card = self.add_entity('Card', wikiid=u'', title=u'test') + req = self.request() + card = req.create_entity('Card', wikiid=u'', title=u'test') self.assertEquals(card.absolute_url(), 'http://testing.fr/cubicweb/card/eid/%s' % card.eid) def test_create_entity(self): - p1 = self.add_entity('Personne', nom=u'fayolle', prenom=u'alexandre') - p2 = self.add_entity('Personne', nom=u'campeas', prenom=u'aurelien') - note = self.add_entity('Note', type=u'z') + req = self.request() + p1 = req.create_entity('Personne', nom=u'fayolle', prenom=u'alexandre') + p2 = req.create_entity('Personne', nom=u'campeas', prenom=u'aurelien') + note = req.create_entity('Note', type=u'z') req = self.request() p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien', connait=p1, evaluee=[p1, p2], diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_mail.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/unittest_mail.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +"""unit tests for module cubicweb.mail + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" + +import os +import sys + +from logilab.common.testlib import unittest_main +from logilab.common.umessage import message_from_string + +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.mail import format_mail + + +def getlogin(): + """avoid usinng os.getlogin() because of strange tty / stdin problems + (man 3 getlogin) + Another solution would be to use $LOGNAME, $USER or $USERNAME + """ + if sys.platform != 'win32': + import pwd + return pwd.getpwuid(os.getuid())[0] + else: + return os.environ.get('USERNAME') + + +class EmailTC(CubicWebTC): + + def test_format_mail(self): + self.set_option('sender-addr', 'bim@boum.fr') + self.set_option('sender-name', 'BimBam') + + mail = format_mail({'name': 'oim', 'email': 'oim@logilab.fr'}, + ['test@logilab.fr'], u'un petit cöucou', u'bïjour', + config=self.config) + self.assertLinesEquals(mail.as_string(), """\ +MIME-Version: 1.0 +Content-Type: text/plain; charset="utf-8" +Content-Transfer-Encoding: base64 +Subject: =?utf-8?q?b=C3=AFjour?= +From: =?utf-8?q?oim?= +Reply-to: =?utf-8?q?oim?= , =?utf-8?q?BimBam?= +X-CW: data +To: test@logilab.fr + +dW4gcGV0aXQgY8O2dWNvdQ== +""") + msg = message_from_string(mail.as_string()) + self.assertEquals(msg.get('subject'), u'bïjour') + self.assertEquals(msg.get('from'), u'oim ') + self.assertEquals(msg.get('to'), u'test@logilab.fr') + self.assertEquals(msg.get('reply-to'), u'oim , BimBam ') + self.assertEquals(msg.get_payload(decode=True), u'un petit cöucou') + + + def test_format_mail_euro(self): + mail = format_mail({'name': u'oîm', 'email': u'oim@logilab.fr'}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €') + self.assertLinesEquals(mail.as_string(), """\ +MIME-Version: 1.0 +Content-Type: text/plain; charset="utf-8" +Content-Transfer-Encoding: base64 +Subject: =?utf-8?b?YsOvam91ciDigqw=?= +From: =?utf-8?q?o=C3=AEm?= +Reply-to: =?utf-8?q?o=C3=AEm?= +To: test@logilab.fr + +dW4gcGV0aXQgY8O2dWNvdSDigqw= +""") + msg = message_from_string(mail.as_string()) + self.assertEquals(msg.get('subject'), u'bïjour €') + self.assertEquals(msg.get('from'), u'oîm ') + self.assertEquals(msg.get('to'), u'test@logilab.fr') + self.assertEquals(msg.get('reply-to'), u'oîm ') + 
self.assertEquals(msg.get_payload(decode=True), u'un petit cöucou €') + + + def test_format_mail_from_reply_to(self): + # no sender-name, sender-addr in the configuration + self.set_option('sender-name', '') + self.set_option('sender-addr', '') + msg = format_mail({'name': u'', 'email': u''}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) + self.assertEquals(msg.get('from'), u'') + self.assertEquals(msg.get('reply-to'), None) + msg = format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) + msg = message_from_string(msg.as_string()) + self.assertEquals(msg.get('from'), u'tutu ') + self.assertEquals(msg.get('reply-to'), u'tutu ') + msg = format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €') + msg = message_from_string(msg.as_string()) + self.assertEquals(msg.get('from'), u'tutu ') + self.assertEquals(msg.get('reply-to'), u'tutu ') + # set sender name and address as expected + self.set_option('sender-name', 'cubicweb-test') + self.set_option('sender-addr', 'cubicweb-test@logilab.fr') + # anonymous notification: no name and no email specified + msg = format_mail({'name': u'', 'email': u''}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) + msg = message_from_string(msg.as_string()) + self.assertEquals(msg.get('from'), u'cubicweb-test ') + self.assertEquals(msg.get('reply-to'), u'cubicweb-test ') + # anonymous notification: only email specified + msg = format_mail({'email': u'tutu@logilab.fr'}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) + msg = message_from_string(msg.as_string()) + self.assertEquals(msg.get('from'), u'cubicweb-test ') + self.assertEquals(msg.get('reply-to'), u'cubicweb-test , cubicweb-test ') + # anonymous notification: only name specified + msg = format_mail({'name': u'tutu'}, + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) + msg = message_from_string(msg.as_string()) + self.assertEquals(msg.get('from'), u'tutu ') + self.assertEquals(msg.get('reply-to'), u'tutu ') + + + +if __name__ == '__main__': + unittest_main() + diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_migration.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/unittest_migration.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,103 @@ +"""cubicweb.migration unit tests + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" + +from os.path import abspath +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb.devtools import TestServerConfiguration +from cubicweb.cwconfig import CubicWebConfiguration +from cubicweb.migration import MigrationHelper, filter_scripts +from cubicweb.server.migractions import ServerMigrationHelper + + +class Schema(dict): + def has_entity(self, e_type): + return self.has_key(e_type) + +SMIGRDIR = abspath('data/server_migration') + '/' +TMIGRDIR = abspath('data/migration') + '/' + +class MigrTestConfig(TestServerConfiguration): + verbosity = 0 + def migration_scripts_dir(cls): + return SMIGRDIR + + def cube_migration_scripts_dir(cls, cube): + return TMIGRDIR + +class MigrationToolsTC(TestCase): + def setUp(self): + self.config = MigrTestConfig('data') + from yams.schema import Schema + self.config.load_schema = lambda expand_cubes=False: Schema('test') + self.config.__class__.cubicweb_appobject_path = frozenset() + self.config.__class__.cube_appobject_path = frozenset() + + def test_filter_scripts_base(self): + self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,3,0), (2,4,0)), + []) + self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,5,0)), + [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql')]) + self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,6,0)), + [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) + self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,6,0)), + [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql'), + ((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) + self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,5,1)), + []) + self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,10,2)), + [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql'), + ((2, 10, 2), SMIGRDIR+'2.10.2_Any.sql')]) + self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,1), (2,6,0)), + [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) + + self.assertListEquals(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,3)), + [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py')]) + self.assertListEquals(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,4)), + [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py'), + ((0, 0, 4), TMIGRDIR+'0.0.4_Any.py')]) + + def test_filter_scripts_for_mode(self): + config = CubicWebConfiguration('data') + config.verbosity = 0 + self.assert_(not isinstance(config.migration_handler(), ServerMigrationHelper)) + self.assertIsInstance(config.migration_handler(), MigrationHelper) + config = self.config + config.__class__.name = 'twisted' + self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), + [((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), + ((0, 1 ,0), TMIGRDIR+'0.1.0_web.py')]) + config.__class__.name = 'repository' + self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), + [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'), + ((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), + ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py')]) + config.__class__.name = 'all-in-one' + self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), + [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'), + ((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), + ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py'), + ((0, 1 ,0), TMIGRDIR+'0.1.0_web.py')]) + config.__class__.name = 'repository' + + +from cubicweb.devtools import ApptestConfiguration, init_test_database, cleanup_sqlite + +class BaseCreationTC(TestCase): + + 
def test_db_creation(self): + """make sure database can be created""" + config = ApptestConfiguration('data') + source = config.sources()['system'] + self.assertEquals(source['db-driver'], 'sqlite') + cleanup_sqlite(source['db-name'], removetemplate=True) + init_test_database(config=config) + + +if __name__ == '__main__': + unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_rqlrewrite.py --- a/test/unittest_rqlrewrite.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/unittest_rqlrewrite.py Wed Jan 20 10:13:45 2010 +0100 @@ -18,7 +18,8 @@ config = TestServerConfiguration('data/rewrite') config.bootstrap_cubes() schema = config.load_schema() -schema.add_relation_def(mock_object(subject='Card', name='in_state', object='State', cardinality='1*')) +from yams.buildobjs import RelationDefinition +schema.add_relation_def(RelationDefinition(subject='Card', name='in_state', object='State', cardinality='1*')) rqlhelper = RQLHelper(schema, special_relations={'eid': 'uid', 'has_text': 'fti'}) diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_rset.py --- a/test/unittest_rset.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/unittest_rset.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,5 +1,5 @@ # coding: utf-8 -"""unit tests for module cubicweb.common.utils +"""unit tests for module cubicweb.utils :organization: Logilab :copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. @@ -10,7 +10,7 @@ from logilab.common.testlib import TestCase, unittest_main -from cubicweb.devtools.apptest import EnvBasedTC +from cubicweb.devtools.testlib import CubicWebTC from cubicweb.selectors import traced_selection from urlparse import urlsplit @@ -55,7 +55,7 @@ -class ResultSetTC(EnvBasedTC): +class ResultSetTC(CubicWebTC): def setUp(self): super(ResultSetTC, self).setUp() @@ -100,12 +100,12 @@ 'Any U,L where U is CWUser, U login L', description=[['CWUser', 'String']] * 3) rs.req = self.request() - rs.vreg = self.env.vreg + rs.vreg = self.vreg self.assertEquals(rs.limit(2).rows, [[12000, 'adim'], [13000, 'syt']]) rs2 = rs.limit(2, offset=1) self.assertEquals(rs2.rows, [[13000, 'syt'], [14000, 'nico']]) - self.assertEquals(rs2.get_entity(0, 0).row, 0) + self.assertEquals(rs2.get_entity(0, 0).cw_row, 0) self.assertEquals(rs.limit(2, offset=2).rows, [[14000, 'nico']]) self.assertEquals(rs.limit(2, offset=3).rows, []) @@ -115,7 +115,7 @@ 'Any U,L where U is CWUser, U login L', description=[['CWUser', 'String']] * 3) rs.req = self.request() - rs.vreg = self.env.vreg + rs.vreg = self.vreg def test_filter(entity): return entity.login != 'nico' @@ -140,7 +140,7 @@ 'Any U,L where U is CWUser, U login L', description=[['CWUser', 'String']] * 3) rs.req = self.request() - rs.vreg = self.env.vreg + rs.vreg = self.vreg rs2 = rs.sorted_rset(lambda e:e['login']) self.assertEquals(len(rs2), 3) @@ -170,7 +170,7 @@ 'D created_by U, D title T', description=[['CWUser', 'String', 'String']] * 5) rs.req = self.request() - rs.vreg = self.env.vreg + rs.vreg = self.vreg rsets = rs.split_rset(lambda e:e['login']) self.assertEquals(len(rsets), 3) @@ -229,16 +229,16 @@ rset = self.execute('Any X,Y,XT,YN WHERE X bookmarked_by Y, X title XT, Y login YN') e = rset.get_entity(0, 0) - self.assertEquals(e.row, 0) - self.assertEquals(e.col, 0) + self.assertEquals(e.cw_row, 0) + self.assertEquals(e.cw_col, 0) self.assertEquals(e['title'], 'zou') self.assertRaises(KeyError, e.__getitem__, 'path') self.assertEquals(e.view('text'), 'zou') self.assertEquals(pprelcachedict(e._related_cache), []) e = rset.get_entity(0, 1) - self.assertEquals(e.row, 
0) - self.assertEquals(e.col, 1) + self.assertEquals(e.cw_row, 0) + self.assertEquals(e.cw_col, 1) self.assertEquals(e['login'], 'anon') self.assertRaises(KeyError, e.__getitem__, 'firstname') self.assertEquals(pprelcachedict(e._related_cache), @@ -304,8 +304,8 @@ ('Bookmark', 'manger'), ('CWGroup', 'owners'), ('CWGroup', 'users')) for entity in rset.entities(): # test get_entity for each row actually - etype, n = expected[entity.row] - self.assertEquals(entity.id, etype) + etype, n = expected[entity.cw_row] + self.assertEquals(entity.__regid__, etype) attr = etype == 'Bookmark' and 'title' or 'name' self.assertEquals(entity[attr], n) @@ -326,7 +326,7 @@ self.assertEquals(entity.eid, e.eid) self.assertEquals(rtype, 'title') entity, rtype = rset.related_entity(1, 1) - self.assertEquals(entity.id, 'CWGroup') + self.assertEquals(entity.__regid__, 'CWGroup') self.assertEquals(rtype, 'name') # rset = self.execute('Any X,N ORDERBY N WHERE X is Bookmark WITH X,N BEING ' diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_schema.py --- a/test/unittest_schema.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/unittest_schema.py Wed Jan 20 10:13:45 2010 +0100 @@ -21,7 +21,7 @@ from cubicweb.schema import ( CubicWebSchema, CubicWebEntitySchema, CubicWebSchemaLoader, RQLConstraint, RQLUniqueConstraint, RQLVocabularyConstraint, - ERQLExpression, RRQLExpression, + RQLExpression, ERQLExpression, RRQLExpression, normalize_expression, order_eschemas, guess_rrqlexpr_mainvars) from cubicweb.devtools import TestServerConfiguration as TestConfiguration @@ -46,7 +46,7 @@ schema = CubicWebSchema('Test Schema') enote = schema.add_entity_type(EntityType('Note')) eaffaire = schema.add_entity_type(EntityType('Affaire')) -eperson = schema.add_entity_type(EntityType('Personne', permissions=PERSONNE_PERMISSIONS)) +eperson = schema.add_entity_type(EntityType('Personne', __permissions__=PERSONNE_PERMISSIONS)) esociete = schema.add_entity_type(EntityType('Societe')) RELS = ( @@ -75,12 +75,13 @@ for rel in RELS: _from, _type, _to = rel.split() if not _type.lower() in done: - if _type == 'concerne': - schema.add_relation_type(RelationType(_type, permissions=CONCERNE_PERMISSIONS)) - else: - schema.add_relation_type(RelationType(_type)) + schema.add_relation_type(RelationType(_type)) done[_type.lower()] = True - schema.add_relation_def(RelationDefinition(_from, _type, _to)) + if _type == 'concerne': + schema.add_relation_def(RelationDefinition(_from, _type, _to, + __permissions__=CONCERNE_PERMISSIONS)) + else: + schema.add_relation_def(RelationDefinition(_from, _type, _to)) class CubicWebSchemaTC(TestCase): @@ -108,23 +109,21 @@ self.assertEqual(schema.rschema('concerne').type, 'concerne') def test_entity_perms(self): - eperson.set_default_groups() self.assertEqual(eperson.get_groups('read'), set(('managers', 'users', 'guests'))) self.assertEqual(eperson.get_groups('update'), set(('managers', 'owners',))) self.assertEqual(eperson.get_groups('delete'), set(('managers', 'owners'))) self.assertEqual(eperson.get_groups('add'), set(('managers',))) self.assertEqual([str(e) for e in eperson.get_rqlexprs('add')], ['Any X WHERE X travaille S, S owned_by U, X eid %(x)s, U eid %(u)s']) - eperson.set_groups('read', ('managers',)) + eperson.set_action_permissions('read', ('managers',)) self.assertEqual(eperson.get_groups('read'), set(('managers',))) def test_relation_perms(self): - rconcerne = schema.rschema('concerne') - rconcerne.set_default_groups() + rconcerne = schema.rschema('concerne').rdef('Personne', 'Societe') 
self.assertEqual(rconcerne.get_groups('read'), set(('managers', 'users', 'guests'))) self.assertEqual(rconcerne.get_groups('delete'), set(('managers',))) self.assertEqual(rconcerne.get_groups('add'), set(('managers', ))) - rconcerne.set_groups('read', ('managers',)) + rconcerne.set_action_permissions('read', ('managers',)) self.assertEqual(rconcerne.get_groups('read'), set(('managers',))) self.assertEqual([str(e) for e in rconcerne.get_rqlexprs('add')], ['Any S,U WHERE U has_update_permission S, S eid %(s)s, U eid %(u)s']) @@ -229,9 +228,9 @@ self.assertListEquals(rels, ['bookmarked_by', 'created_by', 'for_user', 'identity', 'owned_by', 'wf_info_for']) rschema = schema.rschema('relation_type') - properties = rschema.rproperties('CWAttribute', 'CWRType') - self.assertEquals(properties['cardinality'], '1*') - constraints = properties['constraints'] + properties = rschema.rdef('CWAttribute', 'CWRType') + self.assertEquals(properties.cardinality, '1*') + constraints = properties.constraints self.failUnlessEqual(len(constraints), 1, constraints) constraint = constraints[0] self.failUnless(isinstance(constraint, RQLConstraint)) @@ -260,13 +259,13 @@ self._test('rrqlexpr_on_eetype.py', "can't use RRQLExpression on an entity type, use an ERQLExpression (ToTo)") def test_erqlexpr_on_rtype(self): - self._test('erqlexpr_on_ertype.py', "can't use ERQLExpression on a relation type, use a RRQLExpression (toto)") + self._test('erqlexpr_on_ertype.py', "can't use ERQLExpression on relation ToTo toto TuTu, use a RRQLExpression") def test_rqlexpr_on_rtype_read(self): - self._test('rqlexpr_on_ertype_read.py', "can't use rql expression for read permission of a relation type (toto)") + self._test('rqlexpr_on_ertype_read.py', "can't use rql expression for read permission of relation ToTo toto TuTu") def test_rrqlexpr_on_attr(self): - self._test('rrqlexpr_on_attr.py', "can't use RRQLExpression on a final relation type (eg attribute relation), use an ERQLExpression (attr)") + self._test('rrqlexpr_on_attr.py', "can't use RRQLExpression on attribute ToTo.attr[String], use an ERQLExpression") class NormalizeExpressionTC(TestCase): @@ -275,10 +274,16 @@ self.assertEquals(normalize_expression('X bla Y,Y blur Z , Z zigoulou X '), 'X bla Y, Y blur Z, Z zigoulou X') +class RQLExpressionTC(TestCase): + def test_comparison(self): + self.assertEquals(ERQLExpression('X is CWUser', 'X', 0), ERQLExpression('X is CWUser', 'X', 0)) + self.assertNotEquals(ERQLExpression('X is CWUser', 'X', 0), ERQLExpression('X is CWGroup', 'X', 0)) + class GuessRrqlExprMainVarsTC(TestCase): def test_exists(self): mainvars = guess_rrqlexpr_mainvars(normalize_expression('NOT EXISTS(O team_competition C, C level < 3)')) self.assertEquals(mainvars, 'O') + if __name__ == '__main__': unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_selectors.py --- a/test/unittest_selectors.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/unittest_selectors.py Wed Jan 20 10:13:45 2010 +0100 @@ -8,7 +8,7 @@ from logilab.common.testlib import TestCase, unittest_main -from cubicweb.devtools.testlib import EnvBasedTC +from cubicweb.devtools.testlib import CubicWebTC from cubicweb.appobject import Selector, AndSelector, OrSelector from cubicweb.selectors import implements, match_user_groups from cubicweb.interfaces import IDownloadable @@ -88,7 +88,7 @@ self.assertIs(csel.search_selector(implements), sel) -class ImplementsSelectorTC(EnvBasedTC): +class ImplementsSelectorTC(CubicWebTC): def test_etype_priority(self): req = self.request() cls = 
self.vreg['etypes'].etype_class('File') @@ -103,11 +103,11 @@ self.failIf(implements('Societe').score_class(cls, self.request())) -class MatchUserGroupsTC(EnvBasedTC): +class MatchUserGroupsTC(CubicWebTC): def test_owners_group(self): """tests usage of 'owners' group with match_user_group""" class SomeAction(action.Action): - id = 'yo' + __regid__ = 'yo' category = 'foo' __select__ = match_user_groups('owners') self.vreg._loadedmods[__name__] = {} @@ -118,19 +118,22 @@ self.create_user('john') self.login('john') # it should not be possible to use SomeAction not owned objects - rset, req = self.env.get_rset_and_req('Any G WHERE G is CWGroup, G name "managers"') + req = self.request() + rset = req.execute('Any G WHERE G is CWGroup, G name "managers"') self.failIf('yo' in dict(self.pactions(req, rset))) # insert a new card, and check that we can use SomeAction on our object self.execute('INSERT Card C: C title "zoubidou"') self.commit() - rset, req = self.env.get_rset_and_req('Card C WHERE C title "zoubidou"') + req = self.request() + rset = req.execute('Card C WHERE C title "zoubidou"') self.failUnless('yo' in dict(self.pactions(req, rset)), self.pactions(req, rset)) # make sure even managers can't use the action self.restore_connection() - rset, req = self.env.get_rset_and_req('Card C WHERE C title "zoubidou"') + req = self.request() + rset = req.execute('Card C WHERE C title "zoubidou"') self.failIf('yo' in dict(self.pactions(req, rset))) finally: - del self.vreg[SomeAction.__registry__][SomeAction.id] + del self.vreg[SomeAction.__registry__][SomeAction.__regid__] if __name__ == '__main__': unittest_main() diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_uilib.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/unittest_uilib.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +"""unittests for cubicweb.uilib + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" + +__docformat__ = "restructuredtext en" + +from logilab.common.testlib import TestCase, unittest_main +from logilab.common.tree import Node + +from cubicweb import uilib + +class UILIBTC(TestCase): + + def test_remove_tags(self): + """make sure remove_tags remove all tags""" + data = [ + ('

    Hello

    ', 'Hello'), + ('

    Hello spam

    ', 'Hello spam'), + ('
    Hello', 'Hello'), + ('

    ', ''), + ] + for text, expected in data: + got = uilib.remove_html_tags(text) + self.assertEquals(got, expected) + + def test_fallback_safe_cut(self): + self.assertEquals(uilib.fallback_safe_cut(u'ab cd', 4), u'ab c...') + self.assertEquals(uilib.fallback_safe_cut(u'ab cd', 5), u'ab cd') + self.assertEquals(uilib.fallback_safe_cut(u'ab &d', 4), u'ab &...') + self.assertEquals(uilib.fallback_safe_cut(u'ab &d ef', 5), u'ab &d...') + self.assertEquals(uilib.fallback_safe_cut(u'ab ìd', 4), u'ab ì...') + self.assertEquals(uilib.fallback_safe_cut(u'& &d ef', 4), u'& &d...') + + def test_lxml_safe_cut(self): + self.assertEquals(uilib.safe_cut(u'aaa
    aaad
    ef', 4), u'

    aaa

    a...
    ') + self.assertEquals(uilib.safe_cut(u'aaa
    aaad
    ef', 7), u'

    aaa

    aaad
    ...') + self.assertEquals(uilib.safe_cut(u'aaa
    aaad
    ', 7), u'

    aaa

    aaad
    ') + # Missing ellipsis due to space management but we don't care + self.assertEquals(uilib.safe_cut(u'ab &d', 4), u'

    ab &...

    ') + + def test_cut(self): + """tests uilib.cut() behaviour""" + data = [ + ('hello', 'hello'), + ('hello world', 'hello wo...'), + ("hellO' world", "hellO..."), + ] + for text, expected in data: + got = uilib.cut(text, 8) + self.assertEquals(got, expected) + + def test_text_cut(self): + """tests uilib.text_cut() behaviour with no text""" + data = [('',''), + ("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod +tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, +quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo +consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse +cillum dolore eu fugiat nulla pariatur.""", + "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod \ +tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, \ +quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo \ +consequat."), + ("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod +tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam, +quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo +consequat Duis aute irure dolor in reprehenderit in voluptate velit esse +cillum dolore eu fugiat nulla pariatur Excepteur sint occaecat cupidatat non +proident, sunt in culpa qui officia deserunt mollit anim id est laborum +""", + "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod \ +tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam, \ +quis nostrud exercitation ullamco laboris nisi"), + ] + for text, expected in data: + got = uilib.text_cut(text, 30) + self.assertEquals(got, expected) + +if __name__ == '__main__': + unittest_main() + diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_utils.py --- a/test/unittest_utils.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/unittest_utils.py Wed Jan 20 10:13:45 2010 +0100 @@ -1,4 +1,4 @@ -"""unit tests for module cubicweb.common.utils +"""unit tests for module cubicweb.utils :organization: Logilab :copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
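
The unittest_uilib.py hunk above pins down the contract of the string-truncation helpers this changeset introduces in the new uilib.py module (its diff appears further down): cut() hard-truncates at a character count and appends '...' only when it has to truncate, while text_cut() keeps at least nbwords words and then tries to finish the current sentence. The standalone sketch below merely restates that behaviour for illustration; it is not part of the changeset itself, though it mirrors the helpers added in uilib.py.

    # Illustration only: restates the semantics of the cut()/text_cut()
    # helpers added in uilib.py by this patch, as exercised by the tests above.
    def cut(text, length):
        """hard cut at `length` characters, appending '...' when truncated"""
        if text is None:
            return u''
        if len(text) <= length:
            return text
        return text[:length] + u'...'

    def text_cut(text, nbwords=30, gotoperiod=True):
        """keep at least `nbwords` words, then try to finish the sentence"""
        if text is None:
            return u''
        words = text.split()
        text = u' '.join(words)                       # normalize spaces
        textlength = minlength = len(u' '.join(words[:nbwords]))
        if gotoperiod:
            textlength = text.find(u'.', minlength) + 1
            if textlength == 0:                       # no period found
                textlength = minlength
        return text[:textlength]

    assert cut(u'hello world', 8) == u'hello wo...'   # same data as test_cut above

safe_cut() builds on the same idea for HTML input: the lxml-based version truncates the parsed tree, and fallback_safe_cut() strips tags before cutting when lxml is not available, which is what test_fallback_safe_cut and test_lxml_safe_cut above exercise.
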
diff -r 15d541321a8c -r 74c1597f8a82 test/unittest_vregistry.py --- a/test/unittest_vregistry.py Wed Jan 20 10:13:02 2010 +0100 +++ b/test/unittest_vregistry.py Wed Jan 20 10:13:45 2010 +0100 @@ -40,14 +40,6 @@ self.vreg.initialization_completed() self.assertEquals(len(self.vreg['views']['primary']), 1) - def test___selectors__compat(self): - myselector1 = lambda *args: 1 - myselector2 = lambda *args: 1 - class AnAppObject(AppObject): - __selectors__ = (myselector1, myselector2) - AnAppObject.build___select__() - self.assertEquals(AnAppObject.__select__(AnAppObject), 2) - def test_properties(self): self.failIf('system.version.cubicweb' in self.vreg['propertydefs']) self.failUnless(self.vreg.property_info('system.version.cubicweb')) diff -r 15d541321a8c -r 74c1597f8a82 toolsutils.py --- a/toolsutils.py Wed Jan 20 10:13:02 2010 +0100 +++ b/toolsutils.py Wed Jan 20 10:13:45 2010 +0100 @@ -27,6 +27,9 @@ from cubicweb import warning from cubicweb import ConfigurationError, ExecutionError +def underline_title(title, car='-'): + return title+'\n'+(car*len(title)) + def iter_dir(directory, condition_file=None, ignore=()): """iterate on a directory""" for sub in listdir(directory): diff -r 15d541321a8c -r 74c1597f8a82 uilib.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/uilib.py Wed Jan 20 10:13:45 2010 +0100 @@ -0,0 +1,383 @@ +# -*- coding: utf-8 -*- +"""user interface libraries + +contains some functions designed to help implementation of cubicweb user interface + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" + +import csv +import re +from StringIO import StringIO + +from logilab.mtconverter import xml_escape, html_unescape + +from cubicweb.utils import ustrftime + +def rql_for_eid(eid): + """return the rql query necessary to fetch entity with the given eid. This + function should only be used to generate link with rql inside, not to give + to cursor.execute (in which case you won't benefit from rql cache). + + :Parameters: + - `eid`: the eid of the entity we should search + :rtype: str + :return: the rql query + """ + return 'Any X WHERE X eid %s' % eid + + +def printable_value(req, attrtype, value, props=None, displaytime=True): + """return a displayable value (i.e. 
unicode string)""" + if value is None or attrtype == 'Bytes': + return u'' + if attrtype == 'String': + # don't translate empty value if you don't want strange results + if props is not None and value and props.get('internationalizable'): + return req._(value) + return value + if attrtype == 'Date': + return ustrftime(value, req.property_value('ui.date-format')) + if attrtype == 'Time': + return ustrftime(value, req.property_value('ui.time-format')) + if attrtype == 'Datetime': + if displaytime: + return ustrftime(value, req.property_value('ui.datetime-format')) + return ustrftime(value, req.property_value('ui.date-format')) + if attrtype == 'Boolean': + if value: + return req._('yes') + return req._('no') + if attrtype == 'Float': + value = req.property_value('ui.float-format') % value + return unicode(value) + + +# text publishing ############################################################# + +try: + from cubicweb.ext.rest import rest_publish # pylint: disable-msg=W0611 +except ImportError: + def rest_publish(entity, data): + """default behaviour if docutils was not found""" + return xml_escape(data) + +TAG_PROG = re.compile(r'', re.U) +def remove_html_tags(text): + """Removes HTML tags from text + + >>> remove_html_tags('hi world') + 'hi world' + >>> + """ + return TAG_PROG.sub('', text) + + +REF_PROG = re.compile(r"([^<]*)", re.U) +def _subst_rql(view, obj): + delim, rql, descr = obj.groups() + return u'%s' % (view._cw.build_url(rql=rql), descr) + +def html_publish(view, text): + """replace links by """ + if not text: + return u'' + return REF_PROG.sub(lambda obj, view=view:_subst_rql(view, obj), text) + +# fallback implementation, nicer one defined below if lxml is available +def soup2xhtml(data, encoding): + # normalize line break + # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1 + return u'\n'.join(data.splitlines()) + +# fallback implementation, nicer one defined below if lxml> 2.0 is available +def safe_cut(text, length): + """returns a string of length based on , removing any html + tags from given text if cut is necessary.""" + if text is None: + return u'' + noenttext = html_unescape(text) + text_nohtml = remove_html_tags(noenttext) + # try to keep html tags if text is short enough + if len(text_nohtml) <= length: + return text + # else if un-tagged text is too long, cut it + return xml_escape(text_nohtml[:length] + u'...') + +fallback_safe_cut = safe_cut + + +try: + from lxml import etree +except (ImportError, AttributeError): + # gae environment: lxml not available + pass +else: + + def soup2xhtml(data, encoding): + """tidy (at least try) html soup and return the result + Note: the function considers a string with no surrounding tag as valid + if
    `data`
    can be parsed by an XML parser + """ + # normalize line break + # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1 + data = u'\n'.join(data.splitlines()) + # XXX lxml 1.1 support still needed ? + xmltree = etree.HTML('
    %s
    ' % data) + # NOTE: lxml 1.1 (etch platforms) doesn't recognize + # the encoding=unicode parameter (lxml 2.0 does), this is + # why we specify an encoding and re-decode to unicode later + body = etree.tostring(xmltree[0], encoding=encoding) + # remove and and decode to unicode + return body[11:-13].decode(encoding) + + if hasattr(etree.HTML('
    test
    '), 'iter'): + + def safe_cut(text, length): + """returns an html document of length based on , + and cut is necessary. + """ + if text is None: + return u'' + dom = etree.HTML(text) + curlength = 0 + add_ellipsis = False + for element in dom.iter(): + if curlength >= length: + parent = element.getparent() + parent.remove(element) + if curlength == length and (element.text or element.tail): + add_ellipsis = True + else: + if element.text is not None: + element.text = cut(element.text, length - curlength) + curlength += len(element.text) + if element.tail is not None: + if curlength < length: + element.tail = cut(element.tail, length - curlength) + curlength += len(element.tail) + elif curlength == length: + element.tail = '...' + else: + element.tail = '' + text = etree.tounicode(dom[0])[6:-7] # remove wrapping + if add_ellipsis: + return text + u'...' + return text + +def text_cut(text, nbwords=30, gotoperiod=True): + """from the given plain text, return a text with at least words, + trying to go to the end of the current sentence. + + :param nbwords: the minimum number of words required + :param gotoperiod: specifies if the function should try to go to + the first period after the cut (i.e. finish + the sentence if possible) + + Note that spaces are normalized. + """ + if text is None: + return u'' + words = text.split() + text = u' '.join(words) # normalize spaces + textlength = minlength = len(' '.join(words[:nbwords])) + if gotoperiod: + textlength = text.find('.', minlength) + 1 + if textlength == 0: # no period found + textlength = minlength + return text[:textlength] + +def cut(text, length): + """returns a string of a maximum length based on + (approximatively, since if text has been cut, '...' is added to the end of the string, + resulting in a string of len + 3) + """ + if text is None: + return u'' + if len(text) <= length: + return text + # else if un-tagged text is too long, cut it + return text[:length] + u'...' + + + +# HTML generation helper functions ############################################ + +HTML4_EMPTY_TAGS = frozenset(('base', 'meta', 'link', 'hr', 'br', 'param', + 'img', 'area', 'input', 'col')) + +def sgml_attributes(attrs): + return u' '.join(u'%s="%s"' % (attr, xml_escape(unicode(value))) + for attr, value in sorted(attrs.items()) + if value is not None) + +def simple_sgml_tag(tag, content=None, escapecontent=True, **attrs): + """generation of a simple sgml tag (eg without children tags) easier + + content and attri butes will be escaped + """ + value = u'<%s' % tag + if attrs: + try: + attrs['class'] = attrs.pop('klass') + except KeyError: + pass + value += u' ' + sgml_attributes(attrs) + if content: + if escapecontent: + content = xml_escape(unicode(content)) + value += u'>%s' % (content, tag) + else: + if tag in HTML4_EMPTY_TAGS: + value += u' />' + else: + value += u'>' % tag + return value + +def tooltipize(text, tooltip, url=None): + """make an HTML tooltip""" + url = url or '#' + return u'
    %s' % (url, tooltip, text) + +def toggle_action(nodeid): + """builds a HTML link that uses the js toggleVisibility function""" + return u"javascript: toggleVisibility('%s')" % nodeid + +def toggle_link(nodeid, label): + """builds a HTML link that uses the js toggleVisibility function""" + return u'%s' % (toggle_action(nodeid), label) + + +def ureport_as_html(layout): + from logilab.common.ureports import HTMLWriter + formater = HTMLWriter(True) + stream = StringIO() #UStringIO() don't want unicode assertion + formater.format(layout, stream) + res = stream.getvalue() + if isinstance(res, str): + res = unicode(res, 'UTF8') + return res + +# traceback formatting ######################################################## + +import traceback + +def rest_traceback(info, exception): + """return a ReST formated traceback""" + res = [u'Traceback\n---------\n::\n'] + for stackentry in traceback.extract_tb(info[2]): + res.append(u'\tFile %s, line %s, function %s' % tuple(stackentry[:3])) + if stackentry[3]: + res.append(u'\t %s' % stackentry[3].decode('utf-8', 'replace')) + res.append(u'\n') + try: + res.append(u'\t Error: %s\n' % exception) + except: + pass + return u'\n'.join(res) + + +def html_traceback(info, exception, title='', + encoding='ISO-8859-1', body=''): + """ return an html formatted traceback from python exception infos. + """ + tcbk = info[2] + stacktb = traceback.extract_tb(tcbk) + strings = [] + if body: + strings.append(u'
    ') + # FIXME + strings.append(body) + strings.append(u'
    ') + if title: + strings.append(u'

    %s

    '% xml_escape(title)) + try: + strings.append(u'

    %s

    ' % xml_escape(str(exception)).replace("\n","
    ")) + except UnicodeError: + pass + strings.append(u'
    ') + for index, stackentry in enumerate(stacktb): + strings.append(u'File %s, line ' + u'%s, function ' + u'%s:
    '%( + xml_escape(stackentry[0]), stackentry[1], xml_escape(stackentry[2]))) + if stackentry[3]: + string = xml_escape(stackentry[3]).decode('utf-8', 'replace') + strings.append(u'  %s
    \n' % (string)) + # add locals info for each entry + try: + local_context = tcbk.tb_frame.f_locals + html_info = [] + chars = 0 + for name, value in local_context.iteritems(): + value = xml_escape(repr(value)) + info = u'%s=%s, ' % (name, value) + line_length = len(name) + len(value) + chars += line_length + # 150 is the result of *years* of research ;-) (CSS might be helpful here) + if chars > 150: + info = u'
    ' + info + chars = line_length + html_info.append(info) + boxid = 'ctxlevel%d' % index + strings.append(u'[%s]' % toggle_link(boxid, '+')) + strings.append(u'' % + (boxid, ''.join(html_info))) + tcbk = tcbk.tb_next + except Exception: + pass # doesn't really matter if we have no context info + strings.append(u'
    ') + return '\n'.join(strings) + +# csv files / unicode support ################################################# + +class UnicodeCSVWriter: + """proxies calls to csv.writer.writerow to be able to deal with unicode""" + + def __init__(self, wfunc, encoding, **kwargs): + self.writer = csv.writer(self, **kwargs) + self.wfunc = wfunc + self.encoding = encoding + + def write(self, data): + self.wfunc(data) + + def writerow(self, row): + csvrow = [] + for elt in row: + if isinstance(elt, unicode): + csvrow.append(elt.encode(self.encoding)) + else: + csvrow.append(str(elt)) + self.writer.writerow(csvrow) + + def writerows(self, rows): + for row in rows: + self.writerow(row) + + +# some decorators ############################################################# + +class limitsize(object): + def __init__(self, maxsize): + self.maxsize = maxsize + + def __call__(self, function): + def newfunc(*args, **kwargs): + ret = function(*args, **kwargs) + if isinstance(ret, basestring): + return ret[:self.maxsize] + return ret + return newfunc + + +def htmlescape(function): + def newfunc(*args, **kwargs): + ret = function(*args, **kwargs) + assert isinstance(ret, basestring) + return xml_escape(ret) + return newfunc diff -r 15d541321a8c -r 74c1597f8a82 utils.py --- a/utils.py Wed Jan 20 10:13:02 2010 +0100 +++ b/utils.py Wed Jan 20 10:13:45 2010 +0100 @@ -18,6 +18,7 @@ from time import time, mktime from random import randint, seed from calendar import monthrange +import decimal # initialize random seed from current time seed() diff -r 15d541321a8c -r 74c1597f8a82 view.py --- a/view.py Wed Jan 20 10:13:02 2010 +0100 +++ b/view.py Wed Jan 20 10:13:45 2010 +0100 @@ -21,7 +21,6 @@ from cubicweb import NotAnEntity from cubicweb.selectors import yes, non_final_entity, nonempty_rset, none_rset -from cubicweb.selectors import require_group_compat, accepts_compat from cubicweb.appobject import AppObject from cubicweb.utils import UStringIO, HTMLStream from cubicweb.schema import display_name @@ -97,7 +96,6 @@ time to a write function to use. """ __registry__ = 'views' - registered = require_group_compat(AppObject.registered) templatable = True # content_type = 'application/xhtml+xml' # text/xhtml' @@ -119,12 +117,12 @@ return True def __init__(self, req=None, rset=None, **kwargs): - super(View, self).__init__(req, rset, **kwargs) + super(View, self).__init__(req, rset=rset, **kwargs) self.w = None @property def content_type(self): - return self.req.html_content_type() + return self._cw.html_content_type() def set_stream(self, w=None): if self.w is not None: @@ -167,7 +165,20 @@ if stream is not None: return self._stream.getvalue() - dispatch = deprecated('.dispatch is deprecated, use .render')(render) + def tal_render(self, template, variables): + """render a precompiled page template with variables in the given + dictionary as context + """ + from cubicweb.ext.tal import CubicWebContext + context = CubicWebContext() + context.update({'self': self, 'rset': self.cw_rset, '_' : self._cw._, + 'req': self._cw, 'user': self._cw.user}) + context.update(variables) + output = UStringIO() + template.expand(context, output) + return output.getvalue() + + dispatch = deprecated('[3.4] .dispatch is deprecated, use .render')(render) # should default .call() method add a
    around each # rset item @@ -180,15 +191,15 @@ Views applicable on None result sets have to override this method """ - rset = self.rset + rset = self.cw_rset if rset is None: - raise NotImplementedError, self + raise NotImplementedError, (self, "an rset is required") wrap = self.templatable and len(rset) > 1 and self.add_div_section # XXX propagate self.extra_kwars? for i in xrange(len(rset)): if wrap: self.w(u'
    ') - self.wview(self.id, rset, row=i, **kwargs) + self.wview(self.__regid__, rset, row=i, **kwargs) if wrap: self.w(u"
    ") @@ -206,23 +217,23 @@ return True def is_primary(self): - return self.extra_kwargs.get('is_primary', self.id == 'primary') + return self.cw_extra_kwargs.get('is_primary', self.__regid__ == 'primary') def url(self): """return the url associated with this view. Should not be necessary for non linkable views, but a default implementation is provided anyway. """ - rset = self.rset + rset = self.cw_rset if rset is None: - return self.build_url('view', vid=self.id) + return self._cw.build_url('view', vid=self.__regid__) coltypes = rset.column_types(0) if len(coltypes) == 1: etype = iter(coltypes).next() - if not self.schema.eschema(etype).final: + if not self._cw.vreg.schema.eschema(etype).final: if len(rset) == 1: entity = rset.get_entity(0, 0) - return entity.absolute_url(vid=self.id) + return entity.absolute_url(vid=self.__regid__) # don't want to generate / url if there is some restriction # on something else than the entity type restr = rset.syntax_tree().children[0].where @@ -232,25 +243,25 @@ norestriction = (isinstance(restr, nodes.Relation) and restr.is_types_restriction()) if norestriction: - return self.build_url(etype.lower(), vid=self.id) - return self.build_url('view', rql=rset.printable_rql(), vid=self.id) + return self._cw.build_url(etype.lower(), vid=self.__regid__) + return self._cw.build_url('view', rql=rset.printable_rql(), vid=self.__regid__) def set_request_content_type(self): """set the content type returned by this view""" - self.req.set_content_type(self.content_type) + self._cw.set_content_type(self.content_type) # view utilities ########################################################## def wview(self, __vid, rset=None, __fallback_vid=None, **kwargs): """shortcut to self.view method automatically passing self.w as argument """ - self.view(__vid, rset, __fallback_vid, w=self.w, **kwargs) + self._cw.view(__vid, rset, __fallback_vid, w=self.w, **kwargs) # XXX Template bw compat - template = deprecated('.template is deprecated, use .view')(wview) + template = deprecated('[3.4] .template is deprecated, use .view')(wview) def whead(self, data): - self.req.html_headers.write(data) + self._cw.html_headers.write(data) def wdata(self, data): """simple helper that escapes `data` and writes into `self.w`""" @@ -268,34 +279,34 @@ """returns a title according to the result set - used for the title in the HTML header """ - vtitle = self.req.form.get('vtitle') + vtitle = self._cw.form.get('vtitle') if vtitle: - return self.req._(vtitle) + return self._cw._(vtitle) # class defined title will only be used if the resulting title doesn't # seem clear enough vtitle = getattr(self, 'title', None) or u'' if vtitle: - vtitle = self.req._(vtitle) - rset = self.rset + vtitle = self._cw._(vtitle) + rset = self.cw_rset if rset and rset.rowcount: if rset.rowcount == 1: try: - entity = self.complete_entity(0) + entity = rset.complete_entity(0, 0) # use long_title to get context information if any clabel = entity.dc_long_title() except NotAnEntity: - clabel = display_name(self.req, rset.description[0][0]) + clabel = display_name(self._cw, rset.description[0][0]) clabel = u'%s (%s)' % (clabel, vtitle) else : etypes = rset.column_types(0) if len(etypes) == 1: etype = iter(etypes).next() - clabel = display_name(self.req, etype, 'plural') + clabel = display_name(self._cw, etype, 'plural') else : clabel = u'#[*] (%s)' % vtitle else: clabel = vtitle - return u'%s (%s)' % (clabel, self.req.property_value('ui.site-title')) + return u'%s (%s)' % (clabel, self._cw.property_value('ui.site-title')) def 
output_url_builder( self, name, url, args ): self.w(u'\n') def create_url(self, etype, **kwargs): - """return the url of the entity creation form for a given entity type""" - return self.req.build_url('add/%s' % etype, **kwargs) + """ return the url of the entity creation form for a given entity type""" + return self._cw.build_url('add/%s' % etype, **kwargs) def field(self, label, value, row=True, show_label=True, w=None, tr=True, table=False): """read-only field""" @@ -323,7 +334,7 @@ w(u'
    ') if show_label and label: if tr: - label = display_name(self.req, label) + label = display_name(self._cw, label) if table: w(u'%s' % label) else: @@ -343,8 +354,6 @@ class EntityView(View): """base class for views applying on an entity (i.e. uniform result set)""" __select__ = non_final_entity() - registered = accepts_compat(View.registered) - category = 'entityview' @@ -353,7 +362,6 @@ displayed (so they can always be displayed !) """ __select__ = none_rset() - registered = require_group_compat(View.registered) category = 'startupview' @@ -375,7 +383,7 @@ default_rql = None def __init__(self, req, rset=None, **kwargs): - super(EntityStartupView, self).__init__(req, rset, **kwargs) + super(EntityStartupView, self).__init__(req, rset=rset, **kwargs) if rset is None: # this instance is not in the "entityview" category self.category = 'startupview' @@ -388,11 +396,11 @@ """override call to execute rql returned by the .startup_rql method if necessary """ - if self.rset is None: - self.rset = self.req.execute(self.startup_rql()) - rset = self.rset + rset = self.cw_rset + if rset is None: + rset = self.cw_rset = self._cw.execute(self.startup_rql()) for i in xrange(len(rset)): - self.wview(self.id, rset, row=i, **kwargs) + self.wview(self.__regid__, rset, row=i, **kwargs) class AnyRsetView(View): @@ -403,18 +411,18 @@ def columns_labels(self, mainindex=0, tr=True): if tr: - translate = lambda val, req=self.req: display_name(req, val) + translate = lambda val, req=self._cw: display_name(req, val) else: translate = lambda val: val # XXX [0] because of missing Union support - rqlstdescr = self.rset.syntax_tree().get_description(mainindex, - translate)[0] + rqlstdescr = self.cw_rset.syntax_tree().get_description(mainindex, + translate)[0] labels = [] for colindex, label in enumerate(rqlstdescr): # compute column header if label == 'Any': # find a better label label = ','.join(translate(et) - for et in self.rset.column_types(colindex)) + for et in self.cw_rset.column_types(colindex)) labels.append(label) return labels @@ -426,11 +434,10 @@ There is usually at least a regular main template and a simple fallback one to display error if the first one failed """ - registered = require_group_compat(View.registered) @property def doctype(self): - if self.req.xhtml_browser(): + if self._cw.xhtml_browser(): return STRICT_DOCTYPE return STRICT_DOCTYPE_NOEXT @@ -441,7 +448,7 @@ if self.binary: self._stream = stream = StringIO() else: - self._stream = stream = HTMLStream(self.req) + self._stream = stream = HTMLStream(self._cw) w = stream.write else: stream = None @@ -466,16 +473,16 @@ """register the given user callback and return an url to call it ready to be inserted in html """ - self.req.add_js('cubicweb.ajax.js') + self._cw.add_js('cubicweb.ajax.js') if nonify: _cb = cb def cb(*args): _cb(*args) - cbname = self.req.register_onetime_callback(cb, *args) + cbname = self._cw.register_onetime_callback(cb, *args) return self.build_js(cbname, xml_escape(msg or '')) def build_update_js_call(self, cbname, msg): - rql = self.rset.printable_rql() + rql = self.cw_rset.printable_rql() return "javascript:userCallbackThenUpdateUI('%s', '%s', %s, %s, '%s', '%s')" % ( cbname, self.id, dumps(rql), dumps(msg), self.__registry__, self.div_id()) @@ -493,12 +500,12 @@ """base class for components""" __registry__ = 'components' __select__ = yes() - property_defs = {} # XXX huummm, much probably useless htmlclass = 'mainRelated' def div_class(self): - return '%s %s' % (self.htmlclass, self.id) - # XXX a generic '%s%s' % 
(self.id, self.__registry__.capitalize()) would probably be nicer + return '%s %s' % (self.htmlclass, self.__regid__) + + # XXX a generic '%s%s' % (self.__regid__, self.__registry__.capitalize()) would probably be nicer def div_id(self): - return '%sComponent' % self.id + return '%sComponent' % self.__regid__ diff -r 15d541321a8c -r 74c1597f8a82 vregistry.py --- a/vregistry.py Wed Jan 20 10:13:02 2010 +0100 +++ b/vregistry.py Wed Jan 20 10:13:45 2010 +0100 @@ -54,6 +54,21 @@ return _toload +def classid(cls): + """returns a unique identifier for an appobject class""" + return '%s.%s' % (cls.__module__, cls.__name__) + +def class_regid(cls): + """returns a unique identifier for an appobject class""" + if 'id' in cls.__dict__: + warn('[3.6] %s.%s: id is deprecated, use __regid__' + % (cls.__module__, cls.__name__), DeprecationWarning) + cls.__regid__ = cls.id + if hasattr(cls, 'id') and not isinstance(cls.id, property): + return cls.id + return cls.__regid__ + + class Registry(dict): def __init__(self, config): @@ -72,15 +87,13 @@ def register(self, obj, oid=None, clear=False): """base method to add an object in the registry""" assert not '__abstract__' in obj.__dict__ - oid = oid or obj.id + oid = oid or class_regid(obj) assert oid if clear: appobjects = self[oid] = [] else: appobjects = self.setdefault(oid, []) - # registered() is technically a classmethod but is not declared - # as such because we need to compose registered in some cases - appobject = obj.registered.im_func(obj, self) + appobject = obj.__registered__(self) assert not appobject in appobjects, \ 'object %s is already registered' % appobject assert callable(appobject.__select__), appobject @@ -90,11 +103,11 @@ # XXXFIXME this is a duplication of unregister() # remove register_and_replace in favor of unregister + register # or simplify by calling unregister then register here - if hasattr(replaced, 'classid'): - replaced = replaced.classid() - registered_objs = self.get(obj.id, ()) + if not isinstance(replaced, basestring): + replaced = classid(replaced) + registered_objs = self.get(class_regid(obj), ()) for index, registered in enumerate(registered_objs): - if registered.classid() == replaced: + if classid(registered) == replaced: del registered_objs[index] break else: @@ -103,17 +116,18 @@ self.register(obj) def unregister(self, obj): - oid = obj.classid() - for registered in self.get(obj.id, ()): + clsid = classid(obj) + oid = class_regid(obj) + for registered in self.get(oid, ()): # use classid() to compare classes because vreg will probably # have its own version of the class, loaded through execfile - if registered.classid() == oid: + if classid(registered) == clsid: # XXX automatic reloading management - self[obj.id].remove(registered) + self[oid].remove(registered) break else: self.warning('can\'t remove %s, no id %s in the registry', - oid, obj.id) + clsid, oid) def all_objects(self): """return a list containing all objects in this registry. @@ -125,6 +139,7 @@ # dynamic selection methods ################################################ + @deprecated('[3.6] use select instead of object_by_id') def object_by_id(self, oid, *args, **kwargs): """return object with the given oid. Only one object is expected to be found. 
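
The two helpers added at the top of vregistry.py above, classid() and class_regid(), carry the 3.6 move from the old id class attribute to __regid__ while keeping old-style appobjects working behind a deprecation warning; Registry.register() and unregister() now go through them instead of reading obj.id directly. A small self-contained illustration of that bridging behaviour follows; the action classes are hypothetical, and class_regid is restated from the hunk above only so the snippet runs on its own.

    # Hypothetical classes; class_regid mirrors the helper added above.
    from warnings import warn

    def class_regid(cls):
        """return the registry identifier, preferring __regid__ over the old id"""
        if 'id' in cls.__dict__:
            warn('[3.6] %s.%s: id is deprecated, use __regid__'
                 % (cls.__module__, cls.__name__), DeprecationWarning)
            cls.__regid__ = cls.id
        if hasattr(cls, 'id') and not isinstance(cls.id, property):
            return cls.id
        return cls.__regid__

    class NewStyleAction(object):
        __regid__ = 'yo'              # 3.6 spelling, as in unittest_selectors.py

    class OldStyleAction(object):
        id = 'yo'                     # pre-3.6 spelling, still accepted

    assert class_regid(NewStyleAction) == 'yo'
    assert class_regid(OldStyleAction) == 'yo'   # also sets __regid__ and warns
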
@@ -143,9 +158,9 @@ raise `ObjectNotFound` if not object with id in raise `NoSelectableObject` if not object apply """ - return self.select_best(self[oid], *args, **kwargs) + return self._select_best(self[oid], *args, **kwargs) - def select_object(self, oid, *args, **kwargs): + def select_or_none(self, oid, *args, **kwargs): """return the most specific object among those with the given oid according to the given context, or None if no object applies. """ @@ -153,6 +168,8 @@ return self.select(oid, *args, **kwargs) except (NoSelectableObject, ObjectNotFound): return None + select_object = deprecated('[3.6] use select_or_none instead of select_object' + )(select_or_none) def possible_objects(self, *args, **kwargs): """return an iterator on possible objects in this registry for the given @@ -160,11 +177,11 @@ """ for appobjects in self.itervalues(): try: - yield self.select_best(appobjects, *args, **kwargs) + yield self._select_best(appobjects, *args, **kwargs) except NoSelectableObject: continue - def select_best(self, appobjects, *args, **kwargs): + def _select_best(self, appobjects, *args, **kwargs): """return an instance of the most specific object according to parameters @@ -195,6 +212,8 @@ # return the result of calling the appobject return winners[0](*args, **kwargs) + select_best = deprecated('[3.6] select_best is now private')(_select_best) + class VRegistry(dict): """class responsible to register, propose and select the various @@ -223,7 +242,7 @@ # dynamic selection methods ################################################ - @deprecated('use vreg[registry].object_by_id(oid, *args, **kwargs)') + @deprecated('[3.4] use vreg[registry].object_by_id(oid, *args, **kwargs)') def object_by_id(self, registry, oid, *args, **kwargs): """return object in . @@ -232,7 +251,7 @@ """ return self[registry].object_by_id(oid) - @deprecated('use vreg[registry].select(oid, *args, **kwargs)') + @deprecated('[3.4] use vreg[registry].select(oid, *args, **kwargs)') def select(self, registry, oid, *args, **kwargs): """return the most specific object in . according to the given context @@ -242,14 +261,14 @@ """ return self[registry].select(oid, *args, **kwargs) - @deprecated('use vreg[registry].select_object(oid, *args, **kwargs)') + @deprecated('[3.4] use vreg[registry].select_or_none(oid, *args, **kwargs)') def select_object(self, registry, oid, *args, **kwargs): """return the most specific object in . 
according to the given context, or None if no object apply """ - return self[registry].select_object(oid, *args, **kwargs) + return self[registry].select_or_none(oid, *args, **kwargs) - @deprecated('use vreg[registry].possible_objects(*args, **kwargs)') + @deprecated('[3.4] use vreg[registry].possible_objects(*args, **kwargs)') def possible_objects(self, registry, *args, **kwargs): """return an iterator on possible objects in for the given context @@ -283,7 +302,7 @@ try: if obj.__module__ != modname or obj in butclasses: continue - oid = obj.id + oid = class_regid(obj) registryname = obj.__registry__ except AttributeError: continue @@ -301,8 +320,8 @@ except AttributeError: vname = obj.__class__.__name__ self.debug('registered appobject %s in registry %s with id %s', - vname, registryname, oid or obj.id) - self._loadedmods[obj.__module__]['%s.%s' % (obj.__module__, oid)] = obj + vname, registryname, oid or class_regid(obj)) + self._loadedmods[obj.__module__][classid(obj)] = obj def unregister(self, obj, registryname=None): self[registryname or obj.__registry__].unregister(obj) @@ -394,10 +413,10 @@ return except TypeError: return - objname = '%s.%s' % (modname, obj.__name__) - if objname in self._loadedmods[modname]: + clsid = classid(obj) + if clsid in self._loadedmods[modname]: return - self._loadedmods[modname][objname] = obj + self._loadedmods[modname][clsid] = obj for parent in obj.__bases__: self._load_ancestors_then_object(modname, parent) self.load_object(obj) @@ -421,10 +440,10 @@ to a non empty string to be registered. """ if (cls.__dict__.get('__abstract__') or cls.__name__[0] == '_' - or not cls.__registry__ or not cls.id): + or not cls.__registry__ or not class_regid(cls)): return regname = cls.__registry__ - if '%s.%s' % (regname, cls.id) in self.config['disable-appobjects']: + if '%s.%s' % (regname, class_regid(cls)) in self.config['disable-appobjects']: return self.register(cls) @@ -437,11 +456,11 @@ from cubicweb.appobject import objectify_selector, AndSelector, OrSelector, Selector -objectify_selector = deprecated('objectify_selector has been moved to appobject module')(objectify_selector) +objectify_selector = deprecated('[3.4] objectify_selector has been moved to appobject module')(objectify_selector) Selector = class_moved(Selector) -@deprecated('use & operator (binary and)') +@deprecated('[3.4] use & operator (binary and)') def chainall(*selectors, **kwargs): """return a selector chaining given selectors. If one of the selectors fail, selection will fail, else the returned score @@ -454,7 +473,7 @@ selector.__name__ = kwargs['name'] return selector -@deprecated('use | operator (binary or)') +@deprecated('[3.4] use | operator (binary or)') def chainfirst(*selectors, **kwargs): """return a selector chaining given selectors. 
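The chainall/chainfirst deprecations above point at the selector algebra that replaces them: selectors compose with the binary & and | operators. A hedged example of the 3.4+ spelling (MyBox is hypothetical; the selectors are the ones web/box.py already imports in this patch):

    from cubicweb.selectors import one_line_rset, primary_view, match_context_prop
    from cubicweb.web.box import BoxTemplate

    class MyBox(BoxTemplate):              # hypothetical, for illustration only
        __regid__ = 'mybox'
        # pre-3.4:  __select__ = chainall(match_context_prop(), one_line_rset(), primary_view())
        __select__ = BoxTemplate.__select__ & one_line_rset() & primary_view()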
If all the selectors fail, selection will fail, else the returned score diff -r 15d541321a8c -r 74c1597f8a82 web/__init__.py --- a/web/__init__.py Wed Jan 20 10:13:02 2010 +0100 +++ b/web/__init__.py Wed Jan 20 10:13:45 2010 +0100 @@ -11,10 +11,10 @@ _ = unicode from simplejson import dumps +from urllib import quote as urlquote from logilab.common.deprecation import deprecated -from urllib import quote as urlquote from cubicweb.web._exceptions import * from cubicweb.utils import CubicWebJsonEncoder @@ -57,7 +57,7 @@ return json_dumps(repr(value)) return newfunc -@deprecated('use req.build_ajax_replace_url() instead') +@deprecated('[3.4] use req.build_ajax_replace_url() instead') def ajax_replace_url(nodeid, rql, vid=None, swap=False, **extraparams): """builds a replacePageChunk-like url >>> ajax_replace_url('foo', 'Person P') diff -r 15d541321a8c -r 74c1597f8a82 web/_exceptions.py --- a/web/_exceptions.py Wed Jan 20 10:13:02 2010 +0100 +++ b/web/_exceptions.py Wed Jan 20 10:13:45 2010 +0100 @@ -19,6 +19,10 @@ class NothingToEdit(RequestError): """raised when an edit request doesn't specify any eid to edit""" +class ProcessFormError(RequestError): + """raised when posted data can't be processed by the corresponding field + """ + class NotFound(RequestError): """raised when a 404 error should be returned""" diff -r 15d541321a8c -r 74c1597f8a82 web/action.py --- a/web/action.py Wed Jan 20 10:13:02 2010 +0100 +++ b/web/action.py Wed Jan 20 10:13:45 2010 +0100 @@ -10,8 +10,7 @@ from cubicweb import target from cubicweb.selectors import (partial_relation_possible, match_search_state, - one_line_rset, partial_may_add_relation, yes, - accepts_compat, condition_compat, deprecate) + one_line_rset, partial_may_add_relation, yes) from cubicweb.appobject import AppObject @@ -22,7 +21,7 @@ __registry__ = 'actions' __select__ = match_search_state('normal') - property_defs = { + cw_property_defs = { 'visible': dict(type='Boolean', default=True, help=_('display the action or not')), 'order': dict(type='Int', default=99, @@ -50,13 +49,13 @@ raise NotImplementedError def html_class(self): - if self.req.selected(self.url()): + if self._cw.selected(self.url()): return 'selected' if self.category: return 'box' + self.category.capitalize() def build_action(self, title, path, **kwargs): - return UnregisteredAction(self.req, self.rset, title, path, **kwargs) + return UnregisteredAction(self._cw, self.cw_rset, title, path, **kwargs) class UnregisteredAction(Action): @@ -67,7 +66,7 @@ id = None def __init__(self, req, rset, title, path, **kwargs): - Action.__init__(self, req, rset) + Action.__init__(self, req, rset=rset) self.title = req._(title) self._path = path self.__dict__.update(kwargs) @@ -86,21 +85,13 @@ __select__ = (match_search_state('normal') & one_line_rset() & partial_relation_possible(action='add') & partial_may_add_relation()) - registered = accepts_compat(Action.registered) submenu = 'addrelated' def url(self): - current_entity = self.rset.get_entity(self.row or 0, self.col or 0) + current_entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) linkto = '%s:%s:%s' % (self.rtype, current_entity.eid, target(self)) - return self.build_url('add/%s' % self.etype, __linkto=linkto, - __redirectpath=current_entity.rest_path(), # should not be url quoted! - __redirectvid=self.req.form.get('__redirectvid', '')) + return self._cw.build_url('add/%s' % self.etype, __linkto=linkto, + __redirectpath=current_entity.rest_path(), # should not be url quoted! 
+ __redirectvid=self._cw.form.get('__redirectvid', '')) -class EntityAction(Action): - """DEPRECATED / BACKWARD COMPAT - """ - registered = deprecate(condition_compat(accepts_compat(Action.registered)), - msg='EntityAction is deprecated, use Action with ' - 'appropriate selectors') - diff -r 15d541321a8c -r 74c1597f8a82 web/application.py --- a/web/application.py Wed Jan 20 10:13:02 2010 +0100 +++ b/web/application.py Wed Jan 20 10:13:45 2010 +0100 @@ -29,19 +29,19 @@ class AbstractSessionManager(component.Component): """manage session data associated to a session identifier""" - id = 'sessionmanager' + __regid__ = 'sessionmanager' - def __init__(self): - self.session_time = self.vreg.config['http-session-time'] or None + def __init__(self, vreg): + self.session_time = vreg.config['http-session-time'] or None assert self.session_time is None or self.session_time > 0 - self.cleanup_session_time = self.vreg.config['cleanup-session-time'] or 43200 + self.cleanup_session_time = vreg.config['cleanup-session-time'] or 43200 assert self.cleanup_session_time > 0 - self.cleanup_anon_session_time = self.vreg.config['cleanup-anonymous-session-time'] or 120 + self.cleanup_anon_session_time = vreg.config['cleanup-anonymous-session-time'] or 120 assert self.cleanup_anon_session_time > 0 if self.session_time: assert self.cleanup_session_time < self.session_time assert self.cleanup_anon_session_time < self.session_time - self.authmanager = self.vreg['components'].select('authmanager') + self.authmanager = vreg['components'].select('authmanager', vreg=vreg) def clean_sessions(self): """cleanup sessions which has not been unused since a given amount of @@ -92,6 +92,10 @@ class AbstractAuthenticationManager(component.Component): """authenticate user associated to a request and check session validity""" id = 'authmanager' + vreg = None # XXX necessary until property for deprecation warning is on appobject + + def __init__(self, vreg): + self.vreg = vreg def authenticate(self, req): """authenticate user and return corresponding user object @@ -113,7 +117,8 @@ def __init__(self, appli): self.vreg = appli.vreg - self.session_manager = self.vreg['components'].select('sessionmanager') + self.session_manager = self.vreg['components'].select('sessionmanager', + vreg=self.vreg) global SESSION_MANAGER SESSION_MANAGER = self.session_manager if not 'last_login_time' in self.vreg.schema: @@ -122,7 +127,8 @@ def reset_session_manager(self): data = self.session_manager.dump_data() - self.session_manager = self.vreg['components'].select('sessionmanager') + self.session_manager = self.vreg['components'].select('sessionmanager', + vreg=self.vreg) self.session_manager.restore_data(data) global SESSION_MANAGER SESSION_MANAGER = self.session_manager @@ -252,7 +258,8 @@ CW_EVENT_MANAGER.bind('after-registry-reload', self.set_urlresolver) def set_urlresolver(self): - self.url_resolver = self.vreg['components'].select('urlpublisher') + self.url_resolver = self.vreg['components'].select('urlpublisher', + vreg=self.vreg) def connect(self, req): """return a connection for a logged user object according to existing @@ -285,7 +292,7 @@ finally: self._logfile_lock.release() - @deprecated("use vreg.select('controllers', ...)") + @deprecated("[3.4] use vreg['controllers'].select(...)") def select_controller(self, oid, req): try: return self.vreg['controllers'].select(oid, req=req, appli=self) @@ -368,7 +375,7 @@ def validation_error_handler(self, req, ex): ex.errors = dict((k, v) for k, v in ex.errors.items()) if '__errorurl' in req.form: 
- forminfo = {'errors': ex, + forminfo = {'error': ex, 'values': req.form, 'eidmap': req.data.get('eidmap', {}) } diff -r 15d541321a8c -r 74c1597f8a82 web/box.py --- a/web/box.py Wed Jan 20 10:13:02 2010 +0100 +++ b/web/box.py Wed Jan 20 10:13:45 2010 +0100 @@ -13,9 +13,7 @@ from cubicweb import Unauthorized, role as get_role, target as get_target from cubicweb.schema import display_name from cubicweb.selectors import (one_line_rset, primary_view, - match_context_prop, partial_has_related_entities, - accepts_compat, has_relation_compat, - condition_compat, require_group_compat) + match_context_prop, partial_has_related_entities) from cubicweb.view import View, ReloadableMixIn from cubicweb.web.htmlwidgets import (BoxLink, BoxWidget, SideBoxWidget, @@ -39,10 +37,9 @@ """ __registry__ = 'boxes' __select__ = match_context_prop() - registered = classmethod(require_group_compat(View.registered)) categories_in_order = () - property_defs = { + cw_property_defs = { _('visible'): dict(type='Boolean', default=True, help=_('display the box or not')), _('order'): dict(type='Int', default=99, @@ -80,18 +77,18 @@ return self.box_action(self._action(title, path, **kwargs)) def _action(self, title, path, **kwargs): - return UnregisteredAction(self.req, self.rset, title, path, **kwargs) + return UnregisteredAction(self._cw, self.cw_rset, title, path, **kwargs) # formating callbacks def boxitem_link_tooltip(self, action): - if action.id: - return u'keyword: %s' % action.id + if action.__regid__: + return u'keyword: %s' % action.__regid__ return u'' def box_action(self, action): cls = getattr(action, 'html_class', lambda: None)() or self.htmlitemclass - return BoxLink(action.url(), self.req._(action.title), + return BoxLink(action.url(), self._cw._(action.title), cls, self.boxitem_link_tooltip(action)) @@ -108,18 +105,18 @@ rql = None def to_display_rql(self): - assert self.rql is not None, self.id + assert self.rql is not None, self.__regid__ return (self.rql,) def call(self, **kwargs): try: - rset = self.req.execute(*self.to_display_rql()) + rset = self._cw.execute(*self.to_display_rql()) except Unauthorized: # can't access to something in the query, forget this box return if len(rset) == 0: return - box = BoxWidget(self.req._(self.title), self.id) + box = BoxWidget(self._cw._(self.title), self.__regid__) for i, (teid, tname) in enumerate(rset): entity = rset.get_entity(i, 0) box.append(self.mk_action(tname, entity.absolute_url())) @@ -132,14 +129,13 @@ """ def to_display_rql(self): - assert self.rql is not None, self.id - return (self.rql, {'x': self.req.user.eid}, 'x') + assert self.rql is not None, self.__regid__ + return (self.rql, {'x': self._cw.user.eid}, 'x') class EntityBoxTemplate(BoxTemplate): """base class for boxes related to a single entity""" __select__ = BoxTemplate.__select__ & one_line_rset() & primary_view() - registered = accepts_compat(has_relation_compat(condition_compat(BoxTemplate.registered))) context = 'incontext' def call(self, row=0, col=0, **kwargs): @@ -151,12 +147,12 @@ __select__ = EntityBoxTemplate.__select__ & partial_has_related_entities() def cell_call(self, row, col, **kwargs): - entity = self.rset.get_entity(row, col) - limit = self.req.property_value('navigation.related-limit') + 1 + entity = self.cw_rset.get_entity(row, col) + limit = self._cw.property_value('navigation.related-limit') + 1 role = get_role(self) self.w(u'') @@ -169,9 +165,9 @@ """ def cell_call(self, row, col, view=None, **kwargs): - self.req.add_js('cubicweb.ajax.js') - entity = 
self.rset.get_entity(row, col) - box = SideBoxWidget(display_name(self.req, self.rtype), self.id) + self._cw.add_js('cubicweb.ajax.js') + entity = self.cw_rset.get_entity(row, col) + box = SideBoxWidget(display_name(self._cw, self.rtype), self.__regid__) related = self.related_boxitems(entity) unrelated = self.unrelated_boxitems(entity) box.extend(related) @@ -181,7 +177,7 @@ box.render(self.w) def div_id(self): - return self.id + return self.__regid__ def box_item(self, entity, etarget, rql, label): """builds HTML link to edit relation between `entity` and `etarget` @@ -222,12 +218,12 @@ return entity.unrelated(self.rtype, self.etype, get_role(self)).entities() # in other cases, use vocabulary functions entities = [] - form = self.vreg['forms'].select('edition', self.req, rset=self.rset, - row=self.row or 0) + form = self.vreg['forms'].select('edition', self._cw, rset=self.cw_rset, + row=self.cw_row or 0) field = form.field_by_name(self.rtype, get_role(self), entity.e_schema) - for _, eid in form.form_field_vocabulary(field): + for _, eid in field.choices(form): if eid is not None: - rset = self.req.eid_rset(eid) + rset = self._cw.eid_rset(eid) entities.append(rset.get_entity(0, 0)) return entities diff -r 15d541321a8c -r 74c1597f8a82 web/component.py --- a/web/component.py Wed Jan 20 10:13:02 2010 +0100 +++ b/web/component.py Wed Jan 20 10:13:45 2010 +0100 @@ -15,11 +15,10 @@ from cubicweb import role from cubicweb.utils import merge_dicts -from cubicweb.view import View, Component +from cubicweb.view import Component from cubicweb.selectors import ( paginated_rset, one_line_rset, primary_view, match_context_prop, - partial_has_related_entities, condition_compat, accepts_compat, - has_relation_compat) + partial_has_related_entities) class EntityVComponent(Component): @@ -35,9 +34,8 @@ __registry__ = 'contentnavigation' __select__ = one_line_rset() & primary_view() & match_context_prop() - registered = accepts_compat(has_relation_compat(condition_compat(View.registered))) - property_defs = { + cw_property_defs = { _('visible'): dict(type='Boolean', default=True, help=_('display the component or not')), _('order'): dict(type='Int', default=99, @@ -60,10 +58,10 @@ class NavigationComponent(Component): """abstract base class for navigation components""" - id = 'navigation' + __regid__ = 'navigation' __select__ = paginated_rset() - property_defs = { + cw_property_defs = { _('visible'): dict(type='Boolean', default=True, help=_('display the component or not')), } @@ -78,7 +76,7 @@ no_next_page_link = u'>>' def __init__(self, req, rset, **kwargs): - super(NavigationComponent, self).__init__(req, rset, **kwargs) + super(NavigationComponent, self).__init__(req, rset=rset, **kwargs) self.starting_from = 0 self.total = rset.rowcount @@ -86,12 +84,12 @@ try: return self._page_size except AttributeError: - page_size = self.extra_kwargs.get('page_size') + page_size = self.cw_extra_kwargs.get('page_size') if page_size is None: - if 'page_size' in self.req.form: - page_size = int(self.req.form['page_size']) + if 'page_size' in self._cw.form: + page_size = int(self._cw.form['page_size']) else: - page_size = self.req.property_value(self.page_size_property) + page_size = self._cw.property_value(self.page_size_property) self._page_size = page_size return page_size @@ -102,11 +100,11 @@ def page_boundaries(self): try: - stop = int(self.req.form[self.stop_param]) + 1 - start = int(self.req.form[self.start_param]) + stop = int(self._cw.form[self.stop_param]) + 1 + start = int(self._cw.form[self.start_param]) 
except KeyError: start, stop = 0, self.page_size - if start >= len(self.rset): + if start >= len(self.cw_rset): start, stop = 0, self.page_size self.starting_from = start return start, stop @@ -121,13 +119,13 @@ params = merge_dicts(params, {self.start_param : start, self.stop_param : stop,}) if path == 'json': - rql = params.pop('rql', self.rset.printable_rql()) + rql = params.pop('rql', self.cw_rset.printable_rql()) # latest 'true' used for 'swap' mode url = 'javascript: replacePageChunk(%s, %s, %s, %s, true)' % ( dumps(params.get('divid', 'paginated-content')), dumps(rql), dumps(params.pop('vid', None)), dumps(params)) else: - url = self.build_url(path, **params) + url = self._cw.build_url(path, **params) return url def page_link(self, path, params, start, stop, content): @@ -167,15 +165,15 @@ def cell_call(self, row, col, view=None): rql = self.rql() if rql is None: - entity = self.rset.get_entity(row, col) + entity = self.cw_rset.get_entity(row, col) rset = entity.related(self.rtype, role(self)) else: - eid = self.rset[row][col] - rset = self.req.execute(self.rql(), {'x': eid}, 'x') + eid = self.cw_rset[row][col] + rset = self._cw.execute(self.rql(), {'x': eid}, 'x') if not rset.rowcount: return self.w(u'
<div class="%s">' % self.div_class())
-        self.w(u'<h4>%s</h4>\n' % self.req._(self.title).capitalize())
+        self.w(u'<h4>%s</h4>\n' % self._cw._(self.title).capitalize())
         self.wview(self.vid, rset)
         self.w(u'</div>
    ') diff -r 15d541321a8c -r 74c1597f8a82 web/controller.py --- a/web/controller.py Wed Jan 20 10:13:02 2010 +0100 +++ b/web/controller.py Wed Jan 20 10:13:45 2010 +0100 @@ -11,7 +11,7 @@ import datetime from cubicweb import typed_eid -from cubicweb.selectors import yes, require_group_compat +from cubicweb.selectors import yes from cubicweb.appobject import AppObject from cubicweb.web import LOGGER, Redirect, RequestError @@ -35,19 +35,6 @@ params[navparam] = form[redirectparam] return params -def parse_relations_descr(rdescr): - """parse a string describing some relations, in the form - subjeids:rtype:objeids - where subjeids and objeids are eids separeted by a underscore - - return an iterator on (subject eid, relation type, object eid) found - """ - for rstr in rdescr: - subjs, rtype, objs = rstr.split(':') - for subj in subjs.split('_'): - for obj in objs.split('_'): - yield typed_eid(subj), rtype, typed_eid(obj) - def append_url_params(url, params): """append raw parameters to the url. Given parameters, if any, are expected to be already url-quoted. @@ -68,7 +55,6 @@ """ __registry__ = 'controllers' __select__ = yes() - registered = require_group_compat(AppObject.registered) def __init__(self, *args, **kwargs): self.appli = kwargs.pop('appli', None) @@ -88,15 +74,15 @@ def process_rql(self, rql): """execute rql if specified""" # XXX assigning to self really necessary? - self.rset = None + self.cw_rset = None if rql: - self.ensure_ro_rql(rql) + self._cw.ensure_ro_rql(rql) if not isinstance(rql, unicode): - rql = unicode(rql, self.req.encoding) - pp = self.vreg['components'].select_object('magicsearch', self.req) + rql = unicode(rql, self._cw.encoding) + pp = self._cw.vreg['components'].select_or_none('magicsearch', self._cw) if pp is not None: - self.rset = pp.process_query(rql, self.req) - return self.rset + self.cw_rset = pp.process_query(rql) + return self.cw_rset def check_expected_params(self, params): """check that the given list of parameters are specified in the form @@ -104,7 +90,7 @@ """ missing = [] for param in params: - if not self.req.form.get(param): + if not self._cw.form.get(param): missing.append(param) if missing: raise RequestError('missing required parameter(s): %s' @@ -124,7 +110,7 @@ redirect_info = set() eidtypes = tuple(eidtypes) for eid, etype in eidtypes: - entity = self.req.entity_from_eid(eid, etype) + entity = self._cw.entity_from_eid(eid, etype) path, params = entity.after_deletion_path() redirect_info.add( (path, tuple(params.iteritems())) ) entity.delete() @@ -134,25 +120,9 @@ else: self._after_deletion_path = iter(redirect_info).next() if len(eidtypes) > 1: - self.req.set_message(self.req._('entities deleted')) + self._cw.set_message(self._cw._('entities deleted')) else: - self.req.set_message(self.req._('entity deleted')) - - def delete_relations(self, rdefs): - """delete relations from the repository""" - # FIXME convert to using the syntax subject:relation:eids - execute = self.req.execute - for subj, rtype, obj in rdefs: - rql = 'DELETE X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype - execute(rql, {'x': subj, 'y': obj}, ('x', 'y')) - self.req.set_message(self.req._('relations deleted')) - - def insert_relations(self, rdefs): - """insert relations into the repository""" - execute = self.req.execute - for subj, rtype, obj in rdefs: - rql = 'SET X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype - execute(rql, {'x': subj, 'y': obj}, ('x', 'y')) + self._cw.set_message(self._cw._('entity deleted')) def reset(self): @@ -161,11 +131,11 @@ """ newparams 
= {} # sets message if needed - if self.req.message: - newparams['__message'] = self.req.message - if self.req.form.has_key('__action_apply'): + if self._cw.message: + newparams['__message'] = self._cw.message + if self._cw.form.has_key('__action_apply'): self._return_to_edition_view(newparams) - if self.req.form.has_key('__action_cancel'): + if self._cw.form.has_key('__action_cancel'): self._return_to_lastpage(newparams) else: self._return_to_original_view(newparams) @@ -174,7 +144,7 @@ def _return_to_original_view(self, newparams): """validate-button case""" # transforms __redirect[*] parameters into regular form parameters - newparams.update(redirect_params(self.req.form)) + newparams.update(redirect_params(self._cw.form)) # find out if we have some explicit `rql` needs rql = newparams.pop('rql', None) # if rql is needed (explicit __redirectrql or multiple deletions for @@ -182,9 +152,9 @@ if rql: path = 'view' newparams['rql'] = rql - elif '__redirectpath' in self.req.form: + elif '__redirectpath' in self._cw.form: # if redirect path was explicitly specified in the form, use it - path = self.req.form['__redirectpath'] + path = self._cw.form['__redirectpath'] if self._edited_entity and path != self._edited_entity.rest_path(): # XXX may be here on modification? if yes the message should be # modified where __createdpath is detected (cw.web.request) @@ -199,19 +169,19 @@ path = self._edited_entity.rest_path() else: path = 'view' - url = self.build_url(path, **newparams) - url = append_url_params(url, self.req.form.get('__redirectparams')) + url = self._cw.build_url(path, **newparams) + url = append_url_params(url, self._cw.form.get('__redirectparams')) raise Redirect(url) def _return_to_edition_view(self, newparams): """apply-button case""" - form = self.req.form + form = self._cw.form if self._edited_entity: path = self._edited_entity.rest_path() newparams.pop('rql', None) # else, fallback on the old `view?rql=...` url form - elif 'rql' in self.req.form: + elif 'rql' in self._cw.form: path = 'view' newparams['rql'] = form['rql'] else: @@ -224,7 +194,7 @@ for redirectparam in NAV_FORM_PARAMETERS: if redirectparam in form: newparams[redirectparam] = form[redirectparam] - raise Redirect(self.build_url(path, **newparams)) + raise Redirect(self._cw.build_url(path, **newparams)) def _return_to_lastpage(self, newparams): @@ -233,13 +203,13 @@ __redirectpath is specifying that place if found, else we look in the request breadcrumbs for the last visited page. 
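Most of the churn in this controller hunk is mechanical: self.req becomes self._cw, self.rset becomes self.cw_rset, and URLs are built through the request object. A hedged before/after sketch of a controller written against the 3.6 API (MyController is hypothetical and not part of the patch):

    from cubicweb.web import Redirect
    from cubicweb.web.controller import Controller

    class MyController(Controller):        # hypothetical, for illustration only
        __regid__ = 'mycontroller'

        def publish(self, rset=None):
            # 3.5 spelling:  path = self.req.form.get('__redirectpath', 'view')
            #                raise Redirect(self.build_url(path))
            path = self._cw.form.get('__redirectpath', 'view')
            raise Redirect(self._cw.build_url(path))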
""" - if '__redirectpath' in self.req.form: + if '__redirectpath' in self._cw.form: # if redirect path was explicitly specified in the form, use it - path = self.req.form['__redirectpath'] - url = self.build_url(path, **newparams) - url = append_url_params(url, self.req.form.get('__redirectparams')) + path = self._cw.form['__redirectpath'] + url = self._cw.build_url(path, **newparams) + url = append_url_params(url, self._cw.form.get('__redirectparams')) else: - url = self.req.last_visited_page() + url = self._cw.last_visited_page() raise Redirect(url) diff -r 15d541321a8c -r 74c1597f8a82 web/data/cubicweb.edition.js --- a/web/data/cubicweb.edition.js Wed Jan 20 10:13:02 2010 +0100 +++ b/web/data/cubicweb.edition.js Wed Jan 20 10:13:45 2010 +0100 @@ -331,15 +331,22 @@ for (fieldname in errors) { var errmsg = errors[fieldname]; var fieldid = fieldname + ':' + eid; - var field = jqNode(fieldname + ':' + eid); - if (field && getNodeAttribute(field, 'type') != 'hidden') { - if ( !firsterrfield ) { - firsterrfield = 'err-' + fieldid; + var suffixes = ['', '-subject', '-object']; + var found = false; + for (var i=0, length=suffixes.length; i\n' % facetid) self.w(u'
<div class="facetTitle" cubicweb:facetName="%s">%s</div>
    \n' % (xml_escape(facetid), title)) if self.facet.support_and(): - _ = self.facet.req._ + _ = self.facet._cw._ self.w(u''' tag for each field's value """ self.add_media(form) - name, values, attrs = self._render_attrs(form, field) + values, attrs = self.values_and_attributes(form, field) # ensure something is rendered if not values: values = (INTERNAL_FIELD_VALUE,) - inputs = [tags.input(name=name, value=value, type=self.type, **attrs) + inputs = [tags.input(name=field.input_name(form), type=self.type, + value=value, **attrs) for value in values] return u'\n'.join(inputs) @@ -99,36 +112,47 @@ def render(self, form, field, renderer): self.add_media(form) - name, values, attrs = self._render_attrs(form, field) + values, attrs = self.values_and_attributes(form, field) assert len(values) == 1 id = attrs.pop('id') - try: - confirmname = '%s-confirm:%s' % tuple(name.rsplit(':', 1)) - except TypeError: - confirmname = '%s-confirm' % name - inputs = [tags.input(name=name, value=values[0], type=self.type, id=id, - **attrs), + inputs = [tags.input(name=field.input_name(form), + value=values[0], type=self.type, id=id, **attrs), '
    ', - tags.input(name=confirmname, value=values[0], type=self.type, - **attrs), - ' ', tags.span(form.req._('confirm password'), + tags.input(name=field.input_name(form, '-confirm'), + value=values[0], type=self.type, **attrs), + ' ', tags.span(form._cw._('confirm password'), **{'class': 'emphasis'})] return u'\n'.join(inputs) + def process_field_data(self, form, field): + passwd1 = super(PasswordInput, self).process_field_data(form, field) + passwd2 = form._cw.form.get(field.input_name(form, '-confirm')) + if passwd1 == passwd2: + if passwd1 is None: + return None + return passwd1.encode('utf-8') + raise ProcessFormError(form._cw._("password and confirmation don't match")) + class PasswordSingleInput(Input): """ without a confirmation field""" type = 'password' + def process_field_data(self, form, field): + value = super(PasswordSingleInput, self).process_field_data(form, field) + if value is not None: + return value.encode('utf-8') + return value + class FileInput(Input): """""" type = 'file' - def _render_attrs(self, form, field): + def values_and_attributes(self, form, field): # ignore value which makes no sense here (XXX even on form validation error?) - name, values, attrs = super(FileInput, self)._render_attrs(form, field) - return name, ('',), attrs + values, attrs = super(FileInput, self).values_and_attributes(form, field) + return ('',), attrs class HiddenInput(Input): @@ -151,7 +175,7 @@ """''') + +''') def test_richtextfield_2(self): self.req.use_fckeditor = lambda: True - self._test_richtextfield('') + self._test_richtextfield('') def test_filefield(self): class FFForm(EntityFieldsForm): - data = FileField(format_field=StringField(name='data_format', max_length=50), - encoding_field=StringField(name='data_encoding', max_length=20)) - file = self.add_entity('File', data_name=u"pouet.txt", data_encoding=u'UTF-8', + data = FileField( + format_field=StringField(name='data_format', max_length=50, + eidparam=True, role='subject'), + encoding_field=StringField(name='data_encoding', max_length=20, + eidparam=True, role='subject'), + eidparam=True, role='subject') + file = self.req.create_entity('File', data_name=u"pouet.txt", data_encoding=u'UTF-8', data=Binary('new widgets system')) form = FFForm(self.req, redirect_path='perdu.com', entity=file) self.assertTextEquals(self._render_entity_field('data', form), - ''' -show advanced fields -