# HG changeset patch
# User Julien Cristau
# Date 1411574670 -7200
# Node ID 793377697c8133a88cec7c28cdd2e7511037b8d2
# Parent  9ccdb3751fbe74f316f20dec290609c2ec5e43c0
# Parent  84738d495ffd9c3d74deff6bc8f4700cad5b3fe4
merge 3.18.6 into 3.19

diff -r 84738d495ffd -r 793377697c81 .hgignore
--- a/.hgignore Wed Sep 24 17:35:59 2014 +0200
+++ b/.hgignore Wed Sep 24 18:04:30 2014 +0200
@@ -18,3 +18,4 @@
 ^doc/html/
 ^doc/doctrees/
 ^doc/book/en/devweb/js_api/
+data/pgdb/
diff -r 84738d495ffd -r 793377697c81 .hgtags
--- a/.hgtags Wed Sep 24 17:35:59 2014 +0200
+++ b/.hgtags Wed Sep 24 18:04:30 2014 +0200
@@ -359,3 +359,15 @@
 d915013567429b481cb2c367071e36451c07a226 cubicweb-version-3.18.6
 d915013567429b481cb2c367071e36451c07a226 cubicweb-debian-version-3.18.6-1
 d915013567429b481cb2c367071e36451c07a226 cubicweb-centos-version-3.18.6-1
+1141927b8494aabd16e31b0d0d9a50fe1fed5f2f cubicweb-version-3.19.0
+1141927b8494aabd16e31b0d0d9a50fe1fed5f2f cubicweb-debian-version-3.19.0-1
+1141927b8494aabd16e31b0d0d9a50fe1fed5f2f cubicweb-centos-version-3.19.0-1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a cubicweb-version-3.19.1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a cubicweb-debian-version-3.19.1-1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a cubicweb-centos-version-3.19.1-1
+8ac2202866e747444ce12778ff8789edd9c92eae cubicweb-version-3.19.2
+8ac2202866e747444ce12778ff8789edd9c92eae cubicweb-debian-version-3.19.2-1
+8ac2202866e747444ce12778ff8789edd9c92eae cubicweb-centos-version-3.19.2-1
+37f7c60f89f13dfcf326a4ea0a98ca20d959f7bd cubicweb-version-3.19.3
+37f7c60f89f13dfcf326a4ea0a98ca20d959f7bd cubicweb-debian-version-3.19.3-1
+37f7c60f89f13dfcf326a4ea0a98ca20d959f7bd cubicweb-centos-version-3.19.3-1
diff -r 84738d495ffd -r 793377697c81 MANIFEST.in
--- a/MANIFEST.in Wed Sep 24 17:35:59 2014 +0200
+++ b/MANIFEST.in Wed Sep 24 18:04:30 2014 +0200
@@ -10,7 +10,7 @@
 recursive-include misc *.py *.png *.display
 
 include web/views/*.pt
-recursive-include web/data external_resources *.js *.css *.py *.png *.gif *.ico *.ttf
+recursive-include web/data external_resources *.js *.css *.py *.png *.gif *.ico *.ttf *.svg *.woff *.eot
 
 recursive-include web/wdoc *.rst *.png *.xml ChangeLog*
 recursive-include devtools/data *.js *.css *.sh
diff -r 84738d495ffd -r 793377697c81 __init__.py
--- a/__init__.py Wed Sep 24 17:35:59 2014 +0200
+++ b/__init__.py Wed Sep 24 18:04:30 2014 +0200
@@ -242,3 +242,15 @@
             errors[rname(*key)] = errors.pop(key)
     return ValidationError(getattr(entity, 'eid', entity), errors,
                            substitutions, i18nvalues)
+
+
+# exceptions ##################################################################
+
+class ProgrammingError(Exception): #DatabaseError):
+    """Exception raised for errors that are related to the database's operation
+    and not necessarily under the control of the programmer, e.g. an unexpected
+    disconnect occurs, the data source name is not found, a transaction could
+    not be processed, a memory allocation error occurred during processing,
+    etc.
+    """
+
diff -r 84738d495ffd -r 793377697c81 __pkginfo__.py
--- a/__pkginfo__.py Wed Sep 24 17:35:59 2014 +0200
+++ b/__pkginfo__.py Wed Sep 24 18:04:30 2014 +0200
@@ -22,7 +22,7 @@
 
 modname = distname = "cubicweb"
 
-numversion = (3, 18, 6)
+numversion = (3, 19, 3)
 version = '.'.join(str(num) for num in numversion)
 
 description = "a repository of entities / relations for knowledge management"
@@ -44,13 +44,12 @@
     'rql': '>= 0.31.2',
     'yams': '>= 0.39.1',
     #gettext # for xgettext, msgcat, etc...
-    # web dependancies
-    'simplejson': '>= 2.0.9',
+    # web dependencies
     'lxml': '',
     'Twisted': '',
     # XXX graphviz
     # server dependencies
-    'logilab-database': '>= 1.11',
+    'logilab-database': '>= 1.12.1',
     'passlib': '',
     }
diff -r 84738d495ffd -r 793377697c81 _exceptions.py
--- a/_exceptions.py Wed Sep 24 17:35:59 2014 +0200
+++ b/_exceptions.py Wed Sep 24 18:04:30 2014 +0200
@@ -76,13 +76,6 @@
     """the eid is not defined in the system tables"""
     msg = 'No entity with eid %s in the repository'
 
-class ETypeNotSupportedBySources(RepositoryError, InternalError):
-    """no source support an entity type"""
-    msg = 'No source supports %r entity\'s type'
-
-class MultiSourcesError(RepositoryError, InternalError):
-    """usually due to bad multisources configuration or rql query"""
-
 class UniqueTogetherError(RepositoryError):
     """raised when a unique_together constraint caused an IntegrityError"""
     def __init__(self, session, **kwargs):
diff -r 84738d495ffd -r 793377697c81 cubicweb.spec
--- a/cubicweb.spec Wed Sep 24 17:35:59 2014 +0200
+++ b/cubicweb.spec Wed Sep 24 18:04:30 2014 +0200
@@ -7,7 +7,7 @@
 %endif
 
 Name: cubicweb
-Version: 3.18.6
+Version: 3.19.3
 Release: logilab.1%{?dist}
 Summary: CubicWeb is a semantic web application framework
 Source0: http://download.logilab.org/pub/cubicweb/cubicweb-%{version}.tar.gz
@@ -24,7 +24,7 @@
 Requires: %{python}-logilab-mtconverter >= 0.8.0
 Requires: %{python}-rql >= 0.31.2
 Requires: %{python}-yams >= 0.39.1
-Requires: %{python}-logilab-database >= 1.11.0
+Requires: %{python}-logilab-database >= 1.12.1
 Requires: %{python}-passlib
 Requires: %{python}-lxml
 Requires: %{python}-twisted-web
diff -r 84738d495ffd -r 793377697c81 cwconfig.py
--- a/cwconfig.py Wed Sep 24 17:35:59 2014 +0200
+++ b/cwconfig.py Wed Sep 24 18:04:30 2014 +0200
@@ -124,14 +124,14 @@
 Python
 ``````
 
-If you installed *CubicWeb* by cloning the Mercurial forest or from source
+If you installed *CubicWeb* by cloning the Mercurial shell repository or from source
 distribution, then you will need to update the environment variable PYTHONPATH by
-adding the path to the forest `cubicweb`:
+adding the path to `cubicweb`:
 
 Add the following lines to either :file:`.bashrc` or :file:`.bash_profile` to
 configure your development environment ::
 
-    export PYTHONPATH=/full/path/to/cubicweb-forest
+    export PYTHONPATH=/full/path/to/grshell-cubicweb
 
 If you installed *CubicWeb* with packages, no configuration is required and your
 new cubes will be placed in `/usr/share/cubicweb/cubes` and your instances will
@@ -554,27 +554,19 @@
                     todo.append(depcube)
         return cubes
 
-    def reorder_cubes(self, cubes):
+    @classmethod
+    def reorder_cubes(cls, cubes):
         """reorder cubes from the top level cubes to inner dependencies
         cubes
        """
        from logilab.common.graph import ordered_nodes, UnorderableGraph
-        # See help string for 'ui-cube' in web/webconfig.py for the reasons
-        # behind this hack.
-        uicube = self.get('ui-cube', None)
         graph = {}
-        if uicube:
-            graph[uicube] = set()
         for cube in cubes:
             cube = CW_MIGRATION_MAP.get(cube, cube)
-            graph[cube] = set(dep for dep in self.cube_dependencies(cube)
+            graph[cube] = set(dep for dep in cls.cube_dependencies(cube)
                               if dep in cubes)
-            graph[cube] |= set(dep for dep in self.cube_recommends(cube)
+            graph[cube] |= set(dep for dep in cls.cube_recommends(cube)
                                if dep in cubes)
-            if uicube and cube != uicube \
-               and cube not in self.cube_dependencies(uicube) \
-               and cube not in self.cube_recommends(uicube):
-                graph[cube].add(uicube)
         try:
             return ordered_nodes(graph)
         except UnorderableGraph as ex:
@@ -795,7 +787,6 @@
     _cubes = None
 
     def init_cubes(self, cubes):
-        assert self._cubes is None, repr(self._cubes)
         self._cubes = self.reorder_cubes(cubes)
         # load cubes'__init__.py file first
         for cube in cubes:
@@ -999,7 +990,7 @@
         super(CubicWebConfiguration, self).adjust_sys_path()
         # adding apphome to python path is not usually necessary in production
         # environments, but necessary for tests
-        if self.apphome and not self.apphome in sys.path:
+        if self.apphome and self.apphome not in sys.path:
             sys.path.insert(0, self.apphome)
 
     @property
diff -r 84738d495ffd -r 793377697c81 cwctl.py
--- a/cwctl.py Wed Sep 24 17:35:59 2014 +0200
+++ b/cwctl.py Wed Sep 24 18:04:30 2014 +0200
@@ -782,7 +782,9 @@
         if self.config.fs_only or toupgrade:
             for cube, fromversion, toversion in toupgrade:
                 print '-> migration needed from %s to %s for %s' % (fromversion, toversion, cube)
-            mih.migrate(vcconf, reversed(toupgrade), self.config)
+            with mih.cnx:
+                with mih.cnx.security_enabled(False, False):
+                    mih.migrate(vcconf, reversed(toupgrade), self.config)
         else:
             print '-> no data migration needed for instance %s.' % appid
         # rewrite main configuration file
@@ -913,13 +915,14 @@
     def _handle_networked(self, appuri):
         """ returns migration context handler & shutdown function """
         from cubicweb import AuthenticationError
-        from cubicweb.dbapi import connect
+        from cubicweb.repoapi import connect, get_repository
         from cubicweb.server.utils import manager_userpasswd
         from cubicweb.server.migractions import ServerMigrationHelper
         while True:
             try:
                 login, pwd = manager_userpasswd(msg=None)
-                cnx = connect(appuri, login=login, password=pwd, mulcnx=False)
+                repo = get_repository(appuri)
+                cnx = connect(repo, login=login, password=pwd, mulcnx=False)
             except AuthenticationError as ex:
                 print ex
             except (KeyboardInterrupt, EOFError):
@@ -949,15 +952,17 @@
         else:
             mih, shutdown_callback = self._handle_networked(appuri)
         try:
-            if args:
-                # use cmdline parser to access left/right attributes only
-                # remember that usage requires instance appid as first argument
-                scripts, args = self.cmdline_parser.largs[1:], self.cmdline_parser.rargs
-                for script in scripts:
-                    mih.cmd_process_script(script, scriptargs=args)
-                mih.commit()
-            else:
-                mih.interactive_shell()
+            with mih.cnx:
+                with mih.cnx.security_enabled(False, False):
+                    if args:
+                        # use cmdline parser to access left/right attributes only
+                        # remember that usage requires instance appid as first argument
+                        scripts, args = self.cmdline_parser.largs[1:], self.cmdline_parser.rargs
+                        for script in scripts:
+                            mih.cmd_process_script(script, scriptargs=args)
+                        mih.commit()
+                    else:
+                        mih.interactive_shell()
         finally:
             shutdown_callback()
@@ -1044,12 +1049,16 @@
             return ('stdlib',)
         return ('stdlib', 'werkzeug')
 
-class WSGIDebugStartHandler(InstanceCommand):
+class WSGIStartHandler(InstanceCommand):
     """Start an interactive wsgi server """
     name = 'wsgi'
     actionverb = 'started'
     arguments = ''
     options = (
+        ("debug",
+         {'short': 'D', 'action': 'store_true',
+          'default': False,
+          'help': 'start server in debug mode.'}),
         ('method',
          {'short': 'm',
           'type': 'choice',
@@ -1059,16 +1068,16 @@
           'help': 'wsgi utility/method'}),
         ('loglevel',
          {'short': 'l',
-          'type' : 'choice',
+          'type': 'choice',
           'metavar': '',
-          'default': 'debug',
+          'default': None,
           'choices': ('debug', 'info', 'warning', 'error'),
           'help': 'debug if -D is set, error otherwise',
           }),
         )
 
     def wsgi_instance(self, appid):
-        config = cwcfg.config_for(appid, debugmode=1)
+        config = cwcfg.config_for(appid, debugmode=self['debug'])
         init_cmdline_log_threshold(config, self['loglevel'])
         assert config.name == 'all-in-one'
         meth = self['method']
@@ -1083,7 +1092,7 @@
     for cmdcls in (ListCommand,
                    CreateInstanceCommand, DeleteInstanceCommand,
                    StartInstanceCommand, StopInstanceCommand, RestartInstanceCommand,
-                   WSGIDebugStartHandler,
+                   WSGIStartHandler,
                    ReloadConfigurationCommand, StatusCommand,
                    UpgradeInstanceCommand,
                    ListVersionsInstanceCommand,
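The upgrade and shell commands above now run the whole migration inside the
migration handler's connection, with both security checks switched off.
Custom maintenance code driven by a ServerMigrationHelper can follow the same
pattern; a minimal sketch, assuming `mih` is an already configured handler as
in the hunks above and `myscript.py` is a placeholder path::

    # sketch: run a maintenance script the way 'cubicweb-ctl shell' now does;
    # the two security_enabled flags disable (read, write) checks for the scope
    with mih.cnx:
        with mih.cnx.security_enabled(False, False):
            mih.cmd_process_script('myscript.py')  # placeholder script path
            mih.commit()
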
diff -r 84738d495ffd -r 793377697c81 cwvreg.py
--- a/cwvreg.py Wed Sep 24 17:35:59 2014 +0200
+++ b/cwvreg.py Wed Sep 24 18:04:30 2014 +0200
@@ -723,7 +723,7 @@
 
     def solutions(self, req, rqlst, args):
         def type_from_eid(eid, req=req):
-            return req.describe(eid)[0]
+            return req.entity_metas(eid)['type']
         return self.rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args)
 
     def parse(self, req, rql, args=None):
diff -r 84738d495ffd -r 793377697c81 dataimport.py
--- a/dataimport.py Wed Sep 24 17:35:59 2014 +0200
+++ b/dataimport.py Wed Sep 24 18:04:30 2014 +0200
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -622,11 +622,13 @@
         self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
                  {'x': int(eid_from), 'y': int(eid_to)})
 
+    @deprecated("[3.19] use session.find(*args, **kwargs).entities() instead")
     def find_entities(self, *args, **kwargs):
-        return self.session.find_entities(*args, **kwargs)
+        return self.session.find(*args, **kwargs).entities()
 
+    @deprecated("[3.19] use session.find(*args, **kwargs).one() instead")
     def find_one_entity(self, *args, **kwargs):
-        return self.session.find_one_entity(*args, **kwargs)
+        return self.session.find(*args, **kwargs).one()
 
 # the import controller ########################################################
 
@@ -948,7 +950,7 @@
     def drop_indexes(self, etype):
         """Drop indexes for a given entity type"""
         if etype not in self.indexes_etypes:
-            cu = self.session.cnxset['system']
+            cu = self.session.cnxset.cu
             def index_to_attr(index):
                 """turn an index name to (database) attribute name"""
                 return index.replace(etype.lower(), '').replace('idx', '').strip('_')
@@ -981,7 +983,6 @@
         self._storage_handler = self.system_source._storage_handler
         self.preprocess_entity = self.system_source.preprocess_entity
         self.sqlgen = self.system_source.sqlgen
-        self.copy_based_source = self.system_source.copy_based_source
         self.uri = self.system_source.uri
         self.eid = self.system_source.eid
         # Directory to write temporary files
@@ -1113,7 +1114,6 @@
     def _handle_insert_entity_sql(self, session, sql, attrs):
         # We have to overwrite the source given in parameters
         # as here, we directly use the system source
-        attrs['source'] = 'system'
         attrs['asource'] = self.system_source.uri
         self._append_to_entities(sql, attrs)
 
@@ -1135,9 +1135,8 @@
         if extid is not None:
             assert isinstance(extid, str)
             extid = b64encode(extid)
-        uri = 'system' if source.copy_based_source else source.uri
         attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid,
-                 'source': uri, 'asource': source.uri, 'mtime': datetime.utcnow()}
+                 'asource': source.uri}
         self._handle_insert_entity_sql(session, self.sqlgen.insert('entities', attrs), attrs)
         # insert core relations: is, is_instance_of and cw_source
         try:
- """ - if uri is None: - return _get_inmemory_repo(config, vreg) - - protocol, hostport, appid = parse_repo_uri(uri) - - if protocol == 'inmemory': - # me may have been called with a dummy 'inmemory://' uri ... - return _get_inmemory_repo(config, vreg) - - if protocol == 'pyroloc': # direct connection to the instance - from logilab.common.pyro_ext import get_proxy - uri = uri.replace('pyroloc', 'PYRO') - return get_proxy(uri) - - if protocol == 'pyro': # connection mediated through the pyro ns - from logilab.common.pyro_ext import ns_get_proxy - path = appid.strip('/') - if not path: - raise ConnectionError( - "can't find instance name in %s (expected to be the path component)" - % uri) - if '.' in path: - nsgroup, nsid = path.rsplit('.', 1) - else: - nsgroup = 'cubicweb' - nsid = path - return ns_get_proxy(nsid, defaultnsgroup=nsgroup, nshost=hostport) - - if protocol.startswith('zmqpickle-'): - from cubicweb.zmqclient import ZMQRepositoryClient - return ZMQRepositoryClient(uri) - else: - raise ConnectionError('unknown protocol: `%s`' % protocol) - - +@deprecated('[3.19] the dbapi is deprecated. Have a look at the new repoapi.') def _repo_connect(repo, login, **kwargs): """Constructor to create a new connection to the given CubicWeb repository. @@ -291,7 +245,6 @@ self.cnx = cnx self.data = {} self.login = login - self.mtime = time() # dbapi session identifier is the same as the first connection # identifier, but may later differ in case of auto-reconnection as done # by the web authentication manager (in cw.web.views.authentication) @@ -327,17 +280,17 @@ else: # these args are initialized after a connection is # established - self.session = None + self.session = DBAPISession(None) self.cnx = self.user = _NeedAuthAccessMock() self.set_default_language(vreg) - def from_controller(self): - return 'view' + def get_option_value(self, option, foreid=None): + if foreid is not None: + warn('[3.19] foreid argument is deprecated', DeprecationWarning, + stacklevel=2) + return self.cnx.get_option_value(option) - def get_option_value(self, option, foreid=None): - return self.cnx.get_option_value(option, foreid) - - def set_session(self, session, user=None): + def set_session(self, session): """method called by the session handler when the user is authenticated or an anonymous connection is open """ @@ -345,11 +298,8 @@ if session.cnx: self.cnx = session.cnx self.execute = session.cnx.cursor(self).execute - if user is None: - user = self.cnx.user(self) - if user is not None: - self.user = user - self.set_entity_cache(user) + self.user = self.cnx.user(self) + self.set_entity_cache(self.user) def execute(self, *args, **kwargs): # pylint: disable=E0202 """overriden when session is set. 
         By default raise authentication error
 
@@ -371,8 +321,8 @@
 
     # server-side service call #################################################
 
-    def call_service(self, regid, async=False, **kwargs):
-        return self.cnx.call_service(regid, async, **kwargs)
+    def call_service(self, regid, **kwargs):
+        return self.cnx.call_service(regid, **kwargs)
 
     # entities cache management ###############################################
 
@@ -393,10 +343,12 @@
 
     # low level session data management #######################################
 
+    @deprecated('[3.19] use session or transaction data')
     def get_shared_data(self, key, default=None, pop=False, txdata=False):
         """see :meth:`Connection.get_shared_data`"""
         return self.cnx.get_shared_data(key, default, pop, txdata)
 
+    @deprecated('[3.19] use session or transaction data')
     def set_shared_data(self, key, value, txdata=False, querydata=None):
         """see :meth:`Connection.set_shared_data`"""
         if querydata is not None:
@@ -407,20 +359,18 @@
 
     # server session compat layer #############################################
 
-    def describe(self, eid, asdict=False):
+    def entity_metas(self, eid):
         """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
-        return self.cnx.describe(eid, asdict)
+        return self.cnx.entity_metas(eid)
 
     def source_defs(self):
         """return the definition of sources used by the repository."""
         return self.cnx.source_defs()
 
-    @deprecated('[3.17] do not use hijack_user. create new Session object')
-    def hijack_user(self, user):
-        """return a fake request/session using specified user"""
-        req = DBAPIRequest(self.vreg)
-        req.set_session(self.session, user)
-        return req
+    @deprecated('[3.19] use .entity_metas(eid) instead')
+    def describe(self, eid, asdict=False):
+        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
+        return self.cnx.describe(eid, asdict)
 
     # these are overridden by set_log_methods below
     # only defining here to prevent pylint from complaining
@@ -429,16 +379,6 @@
 
 set_log_methods(DBAPIRequest, getLogger('cubicweb.dbapi'))
 
-# exceptions ##################################################################
-
-class ProgrammingError(Exception): #DatabaseError):
-    """Exception raised for errors that are related to the database's operation
-    and not necessarily under the control of the programmer, e.g. an unexpected
-    disconnect occurs, the data source name is not found, a transaction could
-    not be processed, a memory allocation error occurred during processing,
-    etc.
-    """
-
 
 # cursor / connection objects ##################################################
 
@@ -531,7 +471,6 @@
     # make exceptions available through the connection object
     ProgrammingError = ProgrammingError
    # attributes that may be overriden per connection instance
-    anonymous_connection = False
     cursor_class = Cursor
     vreg = None
     _closed = None
@@ -557,6 +496,13 @@
             return False
         return isinstance(self._repo, Repository)
 
+    @property # could be a cached property but we want to prevent assignment to
+              # catch potential programming error.
+    def anonymous_connection(self):
+        login = self._repo.user_info(self.sessionid)[1]
+        anon_login = self.vreg.config.get('anonymous-user')
+        return login == anon_login
+
     def __repr__(self):
         if self.anonymous_connection:
             return '<Connection %s (anonymous)>' % self.sessionid
@@ -583,8 +529,8 @@
     # server-side service call #################################################
 
     @check_not_closed
-    def call_service(self, regid, async=False, **kwargs):
-        return self._repo.call_service(self.sessionid, regid, async, **kwargs)
+    def call_service(self, regid, **kwargs):
+        return self._repo.call_service(self.sessionid, regid, **kwargs)
 
     # connection initialization methods ########################################
 
@@ -641,11 +587,11 @@
     def request(self):
         if self._web_request:
-            from cubicweb.web.request import CubicWebRequestBase
-            req = CubicWebRequestBase(self.vreg, False)
+            from cubicweb.web.request import DBAPICubicWebRequestBase
+            req = DBAPICubicWebRequestBase(self.vreg, False)
             req.get_header = lambda x, default=None: default
-            req.set_session = lambda session, user=None: DBAPIRequest.set_session(
-                req, session, user)
+            req.set_session = lambda session: DBAPIRequest.set_session(
+                req, session)
             req.relative_path = lambda includeparams=True: ''
         else:
             req = DBAPIRequest(self.vreg)
@@ -720,22 +666,49 @@
 
     @check_not_closed
     def get_option_value(self, option, foreid=None):
-        """Return the value for `option` in the configuration. If `foreid` is
-        specified, the actual repository to which this entity belongs is
-        dereferenced and the option value retrieved from it.
+        """Return the value for `option` in the configuration.
+
+        `foreid` argument is deprecated and now useless (as of 3.19).
         """
-        return self._repo.get_option_value(option, foreid)
+        if foreid is not None:
+            warn('[3.19] foreid argument is deprecated', DeprecationWarning,
+                 stacklevel=2)
+        return self._repo.get_option_value(option)
 
+    @check_not_closed
+    def entity_metas(self, eid):
+        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
+        try:
+            return self._repo.entity_metas(self.sessionid, eid, **self._txid())
+        except AttributeError:
+            # talking to pre 3.19 repository
+            metas = self._repo.describe(self.sessionid, eid, **self._txid())
+            if len(metas) == 3: # even older backward compat
+                metas = list(metas)
+                metas.append(metas[1])
+            return dict(zip(('type', 'source', 'extid', 'asource'), metas))
+
+
+    @deprecated('[3.19] use .entity_metas(eid) instead')
+    @check_not_closed
     def describe(self, eid, asdict=False):
-        metas = self._repo.describe(self.sessionid, eid, **self._txid())
-        if len(metas) == 3: # backward compat
-            metas = list(metas)
-            metas.append(metas[1])
+        try:
+            metas = self._repo.entity_metas(self.sessionid, eid, **self._txid())
+        except AttributeError:
+            metas = self._repo.describe(self.sessionid, eid, **self._txid())
+            # talking to pre 3.19 repository
+            if len(metas) == 3: # even older backward compat
+                metas = list(metas)
+                metas.append(metas[1])
+            if asdict:
+                return dict(zip(('type', 'source', 'extid', 'asource'), metas))
+            return metas[:-1]
         if asdict:
-            return dict(zip(('type', 'source', 'extid', 'asource'), metas))
-        # XXX :-1 for cw compat, use asdict=True for full information
-        return metas[:-1]
+            metas['asource'] = metas['source'] # XXX pre 3.19 client compat
+            return metas
+        return metas['type'], metas['source'], metas['extid']
 
     # db-api like interface ####################################################
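For client code, the net effect of these dbapi changes is: obtain repositories
and connections from the `repoapi` module (as cwctl.py now does above), and
prefer `entity_metas()`, which returns a dict, over the deprecated
tuple-returning `describe()`. A sketch, with the URI, credentials and `eid`
as placeholders::

    from cubicweb.repoapi import get_repository, connect

    repo = get_repository('inmemory://myinstance')      # placeholder URI
    cnx = connect(repo, login='admin', password='xxx')  # placeholder credentials
    with cnx:
        metas = cnx.entity_metas(eid)  # dict: 'type', 'source', 'extid', 'asource'
        etype = metas['type']          # instead of cnx.describe(eid)[0]
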
diff -r 84738d495ffd -r 793377697c81 debian/changelog
--- a/debian/changelog Wed Sep 24 17:35:59 2014 +0200
+++ b/debian/changelog Wed Sep 24 18:04:30 2014 +0200
@@ -1,3 +1,27 @@
+cubicweb (3.19.3-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Julien Cristau  Fri, 18 Jul 2014 16:16:32 +0200
+
+cubicweb (3.19.2-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Julien Cristau  Thu, 03 Jul 2014 09:53:52 +0200
+
+cubicweb (3.19.1-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Julien Cristau  Tue, 03 Jun 2014 12:16:00 +0200
+
+cubicweb (3.19.0-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Julien Cristau  Mon, 28 Apr 2014 18:35:27 +0200
+
 cubicweb (3.18.6-1) unstable; urgency=low
 
   * new upstream release
diff -r 84738d495ffd -r 793377697c81 debian/control
--- a/debian/control Wed Sep 24 17:35:59 2014 +0200
+++ b/debian/control Wed Sep 24 18:04:30 2014 +0200
@@ -52,7 +52,7 @@
  ${python:Depends},
  cubicweb-common (= ${source:Version}),
  cubicweb-ctl (= ${source:Version}),
- python-logilab-database (>= 1.11.0),
+ python-logilab-database (>= 1.12.1),
  cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2,
@@ -124,7 +124,6 @@
  ${misc:Depends},
  ${python:Depends},
  cubicweb-common (= ${source:Version}),
- python-simplejson (>= 2.0.9)
 Recommends:
  python-docutils (>= 0.6),
  python-vobject,
@@ -166,6 +165,11 @@
  cubicweb-person (<< 1.8.0),
  cubicweb-geocoding (<< 0.2.0),
  cubicweb-invoice (<< 0.6.1),
+ cubicweb-mercurial-server (<< 0.4.2),
+ cubicweb-forgotpwd (<< 0.4.3),
+ cubicweb-registration (<< 0.4.3),
+ cubicweb-vcsfile (<< 1.15.0),
+ cubicweb-bootstrap (<< 0.6),
 Description: common library for the CubicWeb framework
  CubicWeb is a semantic web application framework.
 .
diff -r 84738d495ffd -r 793377697c81 devtools/__init__.py
--- a/devtools/__init__.py Wed Sep 24 17:35:59 2014 +0200
+++ b/devtools/__init__.py Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -21,12 +21,16 @@
 
 import os
 import sys
+import errno
 import logging
 import shutil
 import pickle
 import glob
+import random
+import subprocess
 import warnings
 import tempfile
+import getpass
 from hashlib import sha1 # pylint: disable=E0611
 from datetime import timedelta
 from os.path import (abspath, join, exists, split, isabs, isdir)
@@ -86,6 +90,13 @@
                   'password': u'gingkow',
                   },
            }
+DEFAULT_PSQL_SOURCES = DEFAULT_SOURCES.copy()
+DEFAULT_PSQL_SOURCES['system'] = DEFAULT_SOURCES['system'].copy()
+DEFAULT_PSQL_SOURCES['system']['db-driver'] = 'postgres'
+DEFAULT_PSQL_SOURCES['system']['db-host'] = '/tmp'
+DEFAULT_PSQL_SOURCES['system']['db-port'] = str(random.randrange(5432, 2**16))
+DEFAULT_PSQL_SOURCES['system']['db-user'] = unicode(getpass.getuser())
+DEFAULT_PSQL_SOURCES['system']['db-password'] = None
 
 def turn_repo_off(repo):
     """ Idea: this is less costly than a full re-creation of the repo object.
@@ -121,8 +132,7 @@
     repo._type_source_cache = {}
     repo._extid_cache = {}
     repo.querier._rql_cache = {}
-    for source in repo.sources:
-        source.reset_caches()
+    repo.system_source.reset_caches()
     repo._needs_refresh = False
 
 
@@ -131,6 +141,8 @@
     read_instance_schema = False
     init_repository = True
     skip_db_create_and_restore = False
+    default_sources = DEFAULT_SOURCES
+
     def __init__(self, appid='data', apphome=None, log_threshold=logging.CRITICAL+10):
         # must be set before calling parent __init__
         if apphome is None:
@@ -192,20 +204,20 @@
             sourcefile = super(TestServerConfiguration, self).sources_file()
         return sourcefile
 
-    def sources(self):
+    def read_sources_file(self):
         """By default, we run tests with the sqlite DB backend.  One may use
         its own configuration by just creating a 'sources' file in the test
-        directory from wich tests are launched or by specifying an alternative
+        directory from which tests are launched or by specifying an alternative
         sources file using self.sourcefile.
         """
         try:
-            sources = super(TestServerConfiguration, self).sources()
+            sources = super(TestServerConfiguration, self).read_sources_file()
         except ExecutionError:
             sources = {}
         if not sources:
-            sources = DEFAULT_SOURCES
+            sources = self.default_sources
         if 'admin' not in sources:
-            sources['admin'] = DEFAULT_SOURCES['admin']
+            sources['admin'] = self.default_sources['admin']
         return sources
 
     # web config methods needed here for cases when we use this config as a web
@@ -246,6 +258,10 @@
         self.sourcefile = sourcefile
 
 
+class PostgresApptestConfiguration(ApptestConfiguration):
+    default_sources = DEFAULT_PSQL_SOURCES
+
+
 class RealDatabaseConfiguration(ApptestConfiguration):
     """configuration class for tests to run on a real database.
 
@@ -262,14 +278,14 @@
                                sourcefile='/path/to/sources')
 
         def test_something(self):
-            rset = self.execute('Any X WHERE X is CWUser')
-            self.view('foaf', rset)
+            with self.admin_access.web_request() as req:
+                rset = req.execute('Any X WHERE X is CWUser')
+                self.view('foaf', rset, req=req)
 
     """
     skip_db_create_and_restore = True
     read_instance_schema = True # read schema from database
 
-
 # test database handling #######################################################
 
 DEFAULT_EMPTY_DB_ID = '__default_empty_db__'
 
@@ -279,8 +295,9 @@
     db_cache = {}
     explored_glob = set()
 
-    def __init__(self, config):
+    def __init__(self, config, init_config=None):
         self.config = config
+        self.init_config = init_config
         self._repo = None
         # pure consistency check
         assert self.system_source['db-driver'] == self.DRIVER
@@ -368,6 +385,9 @@
         """
         if self._repo is None:
             self._repo = self._new_repo(self.config)
+            # config has now been bootstrapped, call init_config if specified
+            if self.init_config is not None:
+                self.init_config(self.config)
         repo = self._repo
         repo.turn_repo_on()
         if startup and not repo._has_started:
@@ -389,12 +409,12 @@
 
     def get_cnx(self):
         """return Connection object on the current repository"""
-        from cubicweb.dbapi import _repo_connect
+        from cubicweb.repoapi import connect
         repo = self.get_repo()
-        sources = self.config.sources()
+        sources = self.config.read_sources_file()
         login = unicode(sources['admin']['login'])
         password = sources['admin']['password'] or 'xxx'
-        cnx = _repo_connect(repo, login, password=password)
+        cnx = connect(repo, login, password=password)
         return cnx
 
     def get_repo_and_cnx(self, db_id=DEFAULT_EMPTY_DB_ID):
@@ -412,8 +432,7 @@
 
     @property
     def system_source(self):
-        sources = self.config.sources()
-        return sources['system']
+        return self.config.system_source_config
 
     @property
     def dbname(self):
@@ -477,15 +496,9 @@
         self.restore_database(DEFAULT_EMPTY_DB_ID)
         repo = self.get_repo(startup=True)
         cnx = self.get_cnx()
-        session = repo._sessions[cnx.sessionid]
-        session.set_cnxset()
-        _commit = session.commit
-        def keep_cnxset_commit(free_cnxset=False):
-            _commit(free_cnxset=free_cnxset)
-        session.commit = keep_cnxset_commit
-        pre_setup_func(session, self.config)
-        session.commit()
-        cnx.close()
+        with cnx:
+            pre_setup_func(cnx._cnx, self.config)
+            cnx.commit()
         self.backup_database(test_db_id)
 
@@ -522,6 +535,42 @@
 class PostgresTestDataBaseHandler(TestDataBaseHandler):
     DRIVER = 'postgres'
 
+    __CTL = set()
+
+    @classmethod
+    def killall(cls):
+        for datadir in cls.__CTL:
+            subprocess.call(['pg_ctl', 'stop', '-D', datadir, '-m', 'fast'])
+
+    def __init__(self, *args, **kwargs):
+        super(PostgresTestDataBaseHandler, self).__init__(*args, **kwargs)
+        datadir = join(self.config.apphome, 'pgdb')
+        if not exists(datadir):
+            try:
+                subprocess.check_call(['initdb', '-D', datadir, '-E', 'utf-8', '--locale=C'])
+            except OSError, err:
+                if err.errno == errno.ENOENT:
+                    raise OSError('"initdb" could not be found. '
+                                  'You should add the postgresql bin folder to your PATH '
+                                  '(/usr/lib/postgresql/9.1/bin for example).')
+                raise
+        port = self.system_source['db-port']
+        directory = self.system_source['db-host']
+        env = os.environ.copy()
+        env['PGPORT'] = str(port)
+        env['PGHOST'] = str(directory)
+        try:
+            subprocess.check_call(['pg_ctl', 'start', '-w', '-D', datadir,
+                                   '-o', '-h "" -k %s -p %s' % (directory, port)],
+                                  env=env)
+        except OSError, err:
+            if err.errno == errno.ENOENT:
+                raise OSError('"pg_ctl" could not be found. '
+                              'You should add the postgresql bin folder to your PATH '
+                              '(/usr/lib/postgresql/9.1/bin for example).')
+            raise
+        self.__CTL.add(datadir)
+
     @property
     @cached
     def helper(self):
@@ -577,7 +626,8 @@
         finally:
             templcursor.close()
             cnx.close()
-        init_repository(self.config, interactive=False)
+        init_repository(self.config, interactive=False,
+                        init_config=self.init_config)
     except BaseException:
         if self.dbcnx is not None:
             self.dbcnx.rollback()
@@ -653,7 +703,8 @@
         """initialize a fresh sqlserver databse used for testing purpose"""
         if self.config.init_repository:
             from cubicweb.server import init_repository
-            init_repository(self.config, interactive=False, drop=True)
+            init_repository(self.config, interactive=False, drop=True,
+                            init_config=self.init_config)
 
 ### sqlite test database handling ##############################################
 
@@ -694,8 +745,8 @@
     def absolute_dbfile(self):
         """absolute path of current database file"""
         dbfile = join(self._ensure_test_backup_db_dir(),
-                      self.config.sources()['system']['db-name'])
-        self.config.sources()['system']['db-name'] = dbfile
+                      self.system_source['db-name'])
+        self.system_source['db-name'] = dbfile
         return dbfile
 
     def process_cache_entry(self, directory, dbname, db_id, entry):
@@ -730,10 +781,12 @@
         # initialize the database
         from cubicweb.server import init_repository
         self._cleanup_database(self.absolute_dbfile())
-        init_repository(self.config, interactive=False)
+        init_repository(self.config, interactive=False,
+                        init_config=self.init_config)
 
 import atexit
 atexit.register(SQLiteTestDataBaseHandler._cleanup_all_tmpdb)
+atexit.register(PostgresTestDataBaseHandler.killall)
 
 
 def install_sqlite_patch(querier):
@@ -823,7 +876,7 @@
 # XXX a class method on Test ?
 _CONFIG = None
-def get_test_db_handler(config):
+def get_test_db_handler(config, init_config=None):
     global _CONFIG
     if _CONFIG is not None and config is not _CONFIG:
         from logilab.common.modutils import cleanup_sys_modules
@@ -840,12 +893,11 @@
     handler = HCACHE.get(config)
     if handler is not None:
         return handler
-    sources = config.sources()
-    driver = sources['system']['db-driver']
+    driver = config.system_source_config['db-driver']
     key = (driver, config)
     handlerkls = HANDLERS.get(driver, None)
     if handlerkls is not None:
-        handler = handlerkls(config)
+        handler = handlerkls(config, init_config)
         if config.skip_db_create_and_restore:
             handler = NoCreateDropDatabaseHandler(handler)
         HCACHE.set(config, handler)
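With `DEFAULT_PSQL_SOURCES`, `PostgresApptestConfiguration` and the
`initdb`/`pg_ctl` bootstrapping added above, a test case can opt into a
throwaway PostgreSQL cluster instead of sqlite simply by overriding
`configcls`. A sketch — the test class and query are illustrative, and the
postgresql binaries must be in PATH, as the error messages above note::

    from cubicweb.devtools import PostgresApptestConfiguration
    from cubicweb.devtools.testlib import CubicWebTC

    class MyPostgresTC(CubicWebTC):
        configcls = PostgresApptestConfiguration  # cluster data lands in apphome/pgdb

        def test_users_exist(self):
            # runs against the per-test postgres cluster, not sqlite
            with self.admin_access.repo_cnx() as cnx:
                self.assertTrue(cnx.execute('Any X WHERE X is CWUser'))
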
diff -r 84738d495ffd -r 793377697c81 devtools/devctl.py
--- a/devtools/devctl.py Wed Sep 24 17:35:59 2014 +0200
+++ b/devtools/devctl.py Wed Sep 24 18:04:30 2014 +0200
@@ -776,13 +776,19 @@
           'short': "i", 'metavar': "",
           'help':'coma separated list of entity types to include in view',
          }),
+        ('show-etype',
+         {'type':'string', 'default':'',
+          'metavar': '',
+          'help':'show graph of this etype and its neighbours'
+          }),
         ]
 
     def run(self, args):
         from subprocess import Popen
         from tempfile import NamedTemporaryFile
         from logilab.common.textutils import splitstrip
-        from yams import schema2dot, BASE_TYPES
+        from logilab.common.graph import GraphGenerator, DotBackend
+        from yams import schema2dot as s2d, BASE_TYPES
         from cubicweb.schema import (META_RTYPES, SCHEMA_TYPES, SYSTEM_RTYPES,
                                      WORKFLOW_TYPES, INTERNAL_TYPES)
         cubes = splitstrip(args[0])
@@ -801,7 +807,22 @@
             skiptypes |= set(('CWUser', 'CWGroup', 'EmailAddress'))
         skiptypes |= set(self['exclude-type'].split(','))
         skiptypes -= set(self['include-type'].split(','))
-        schema2dot.schema2dot(schema, out, skiptypes=skiptypes)
+
+        if not self['show-etype']:
+            s2d.schema2dot(schema, out, skiptypes=skiptypes)
+        else:
+            etype = self['show-etype']
+            visitor = s2d.OneHopESchemaVisitor(schema[etype], skiptypes=skiptypes)
+            propshdlr = s2d.SchemaDotPropsHandler(visitor)
+            backend = DotBackend('schema', 'BT',
+                                 ratio='compress', size=None,
+                                 renderer='dot',
+                                 additionnal_param={'overlap' : 'false',
+                                                    'splines' : 'true',
+                                                    'sep' : '0.2'})
+            generator = s2d.GraphGenerator(backend)
+            generator.generate(visitor, propshdlr, out)
+
         if viewer:
             p = Popen((viewer, out))
             p.wait()
diff -r 84738d495ffd -r 793377697c81 devtools/fake.py
--- a/devtools/fake.py Wed Sep 24 17:35:59 2014 +0200
+++ b/devtools/fake.py Wed Sep 24 18:04:30 2014 +0200
@@ -24,7 +24,7 @@
 
 from cubicweb.req import RequestSessionBase
 from cubicweb.cwvreg import CWRegistryStore
-from cubicweb.web.request import CubicWebRequestBase
+from cubicweb.web.request import ConnectionCubicWebRequestBase
 
 from cubicweb.devtools import BASE_URL, BaseApptestConfiguration
 
@@ -53,7 +53,7 @@
         return {'system': {'db-driver': 'sqlite'}}
 
 
-class FakeRequest(CubicWebRequestBase):
+class FakeRequest(ConnectionCubicWebRequestBase):
     """test implementation of an cubicweb request object"""
 
     def __init__(self, *args, **kwargs):
@@ -88,20 +88,20 @@
         return url.split('?', 1)[0]
 
     def set_request_header(self, header, value, raw=False):
-        """set an incoming HTTP header (For test purpose only)"""
+        """set an incoming HTTP header (for test purpose only)"""
         if isinstance(value, basestring):
             value = [value]
-        if raw: #
+        if raw:
             # adding encoded header is important, else page content
             # will be reconverted back to unicode and apart unefficiency, this
             # may cause decoding problem (e.g. when downloading a file)
             self._headers_in.setRawHeaders(header, value)
-        else: #
+        else:
             self._headers_in.setHeader(header, value) #
 
     def get_response_header(self, header, default=None, raw=False):
-        """return output header (For test purpose only"""
-        if raw: #
+        """return output header (for test purpose only)"""
+        if raw:
             return self.headers_out.getRawHeaders(header, [default])[0]
         return self.headers_out.getHeader(header, default)
@@ -169,7 +169,6 @@
         self.config = config or FakeConfig()
         self.vreg = vreg or CWRegistryStore(self.config, initlog=False)
         self.vreg.schema = schema
-        self.sources = []
 
     def internal_session(self):
         return FakeSession(self)
@@ -188,9 +187,6 @@
             source.after_entity_insertion(session, extid, entity)
         return eid
 
-    def eid2extid(self, source, eid, session=None):
-        return self.eids[eid]
-
 
 class FakeSource(object):
     dbhelper = get_db_helper('sqlite')
diff -r 84738d495ffd -r 793377697c81 devtools/fill.py
--- a/devtools/fill.py Wed Sep 24 17:35:59 2014 +0200
+++ b/devtools/fill.py Wed Sep 24 18:04:30 2014 +0200
@@ -1,5 +1,5 @@
 # -*- coding: iso-8859-1 -*-
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -352,7 +352,7 @@
 
 
-def select(constraints, cursor, selectvar='O', objtype=None):
+def select(constraints, cnx, selectvar='O', objtype=None):
     """returns list of eids matching <constraints>
 
     <selectvar> should be either 'O' or 'S' to match schema definitions
     """
@@ -361,7 +361,7 @@
         rql = 'Any %s WHERE %s' % (selectvar, constraints)
         if objtype:
             rql += ', %s is %s' % (selectvar, objtype)
-        rset = cursor.execute(rql)
+        rset = cnx.execute(rql)
     except Exception:
         print "could restrict eid_list with given constraints (%r)" % constraints
         return []
@@ -369,7 +369,7 @@
 
 
-def make_relations_queries(schema, edict, cursor, ignored_relations=(),
+def make_relations_queries(schema, edict, cnx, ignored_relations=(),
                            existingrels=None):
     """returns a list of generated RQL queries for relations
     :param schema: The instance schema
 
     :param ignored_relations: list of relations to ignore (i.e.
                               don't try to generate insert queries for these relations)
     """
     gen = RelationsQueriesGenerator(schema, cnx, existingrels)
     return gen.compute_queries(edict, ignored_relations)
 
 def composite_relation(rschema):
@@ -393,9 +393,9 @@
 class RelationsQueriesGenerator(object):
     rql_tmpl = 'SET S %s O WHERE S eid %%(subjeid)s, O eid %%(objeid)s'
 
-    def __init__(self, schema, cursor, existing=None):
+    def __init__(self, schema, cnx, existing=None):
         self.schema = schema
-        self.cursor = cursor
+        self.cnx = cnx
         self.existingrels = existing or {}
 
     def compute_queries(self, edict, ignored_relations):
@@ -457,7 +457,7 @@
                 # restrict object eids if possible
                 # XXX the attempt to restrict below in completely wrong
                 # disabling it for now
-                objeids = select(restrictions, self.cursor, objtype=obj)
+                objeids = select(restrictions, self.cnx, objtype=obj)
             else:
                 objeids = oedict.get(obj, frozenset())
         if subjcard in '?1' or objcard in '?1':
diff -r 84738d495ffd -r 793377697c81 devtools/httptest.py
--- a/devtools/httptest.py Wed Sep 24 17:35:59 2014 +0200
+++ b/devtools/httptest.py Wed Sep 24 18:04:30 2014 +0200
@@ -89,8 +89,6 @@
     * `anonymous_allowed`: flag telling if anonymous browsing should be allowed
     """
     configcls = CubicWebServerConfig
-    # anonymous is logged by default in cubicweb test cases
-    anonymous_allowed = True
 
     def start_server(self):
         # use a semaphore to avoid starting test while the http server isn't
@@ -104,7 +102,7 @@
         reactor.addSystemEventTrigger('after', 'startup', semaphore.release)
         t = threading.Thread(target=safe_run, name='cubicweb_test_web_server',
-                             args=(self.config, self.vreg, True))
+                             args=(self.config, True), kwargs={'repo': self.repo})
         self.web_thread = t
         t.start()
         semaphore.acquire()
@@ -185,8 +183,3 @@
             # Server could be launched manually
             print err
         super(CubicWebServerTC, self).tearDown()
-
-    @classmethod
-    def init_config(cls, config):
-        config.set_anonymous_allowed(cls.anonymous_allowed)
-        super(CubicWebServerTC, cls).init_config(config)
diff -r 84738d495ffd -r 793377697c81 devtools/repotest.py
--- a/devtools/repotest.py Wed Sep 24 17:35:59 2014 +0200
+++ b/devtools/repotest.py Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -22,17 +22,12 @@
 
 __docformat__ = "restructuredtext en"
 
-from copy import deepcopy
 from pprint import pprint
 
-from logilab.common.decorators import clear_cache
 from logilab.common.testlib import SkipTest
 
-def tuplify(list):
-    for i in range(len(list)):
-        if type(list[i]) is not type(()):
-            list[i] = tuple(list[i])
-    return list
+def tuplify(mylist):
+    return [tuple(item) for item in mylist]
 
 def snippet_cmp(a, b):
     a = (a[0], [e.expression for e in a[1]])
@@ -40,17 +35,18 @@
     return cmp(a, b)
 
 def test_plan(self, rql, expected, kwargs=None):
-    plan = self._prepare_plan(rql, kwargs)
-    self.planner.build_plan(plan)
-    try:
-        self.assertEqual(len(plan.steps), len(expected),
-                         'expected %s steps, got %s' % (len(expected), len(plan.steps)))
-        # step order is important
-        for i, step in enumerate(plan.steps):
-            compare_steps(self, step.test_repr(), expected[i])
-    except AssertionError:
-        pprint([step.test_repr() for step in plan.steps])
-        raise
+    with self.session.new_cnx() as cnx:
+        plan = self._prepare_plan(cnx, rql, kwargs)
+        self.planner.build_plan(plan)
+        try:
+            self.assertEqual(len(plan.steps), len(expected),
+                             'expected %s steps, got %s' % (len(expected), len(plan.steps)))
+            # step order is important
+            for i, step in enumerate(plan.steps):
+                compare_steps(self, step.test_repr(), expected[i])
+        except AssertionError:
+            pprint([step.test_repr() for step in plan.steps])
+            raise
 
 def compare_steps(self, step, expected):
     try:
@@ -141,7 +137,7 @@
 
 from rql import RQLHelper
 
-from cubicweb.devtools.fake import FakeRepo, FakeSession
+from cubicweb.devtools.fake import FakeRepo, FakeConfig, FakeSession
 from cubicweb.server import set_debug, debugged
 from cubicweb.server.querier import QuerierHelper
 from cubicweb.server.session import Session
@@ -159,10 +155,11 @@
         raise SkipTest(str(ex))
 
     def setUp(self):
-        self.repo = FakeRepo(self.schema)
+        self.repo = FakeRepo(self.schema, config=FakeConfig(apphome=self.datadir))
         self.repo.system_source = mock_object(dbdriver=self.backend)
-        self.rqlhelper = RQLHelper(self.schema, special_relations={'eid': 'uid',
-                                                                   'has_text': 'fti'},
+        self.rqlhelper = RQLHelper(self.schema,
+                                   special_relations={'eid': 'uid',
+                                                      'has_text': 'fti'},
                                    backend=self.backend)
         self.qhelper = QuerierHelper(self.repo, self.schema)
         ExecutionPlan._check_permissions = _dummy_check_permissions
@@ -204,27 +201,22 @@
         self.ueid = self.session.user.eid
         assert self.ueid != -1
         self.repo._type_source_cache = {} # clear cache
-        self.cnxset = self.session.set_cnxset()
         self.maxeid = self.get_max_eid()
         do_monkey_patch()
         self._dumb_sessions = []
 
     def get_max_eid(self):
-        return self.session.execute('Any MAX(X)')[0][0]
+        with self.session.new_cnx() as cnx:
+            return cnx.execute('Any MAX(X)')[0][0]
+
     def cleanup(self):
-        self.session.set_cnxset()
-        self.session.execute('DELETE Any X WHERE X eid > %s' % self.maxeid)
+        with self.session.new_cnx() as cnx:
+            cnx.execute('DELETE Any X WHERE X eid > %s' % self.maxeid)
+            cnx.commit()
 
     def tearDown(self):
         undo_monkey_patch()
-        self.session.rollback()
         self.cleanup()
-        self.commit()
-        # properly close dumb sessions
-        for session in self._dumb_sessions:
-            session.rollback()
-            session.close()
-        self.repo._free_cnxset(self.cnxset)
         assert self.session.user.eid != -1
 
     def set_debug(self, debug):
@@ -239,7 +231,7 @@
         rqlhelper._analyser.uid_func_mapping = {}
         return rqlhelper
 
-    def _prepare_plan(self, rql, kwargs=None, simplify=True):
+    def _prepare_plan(self, cnx, rql, kwargs=None, simplify=True):
         rqlhelper = self._rqlhelper()
         rqlst = rqlhelper.parse(rql)
         rqlhelper.compute_solutions(rqlst, kwargs=kwargs)
         if simplify:
             rqlhelper.simplify(rqlst)
         for select in rqlst.children:
             select.solutions.sort()
-        return self.o.plan_factory(rqlst, kwargs, self.session)
+        return self.o.plan_factory(rqlst, kwargs, cnx)
 
-    def _prepare(self, rql, kwargs=None):
-        plan = self._prepare_plan(rql, kwargs, simplify=False)
+    def _prepare(self, cnx, rql, kwargs=None):
+        plan = self._prepare_plan(cnx, rql, kwargs, simplify=False)
         plan.preprocess(plan.rqlst)
         rqlst = plan.rqlst.children[0]
         rqlst.solutions = remove_unused_solutions(rqlst, rqlst.solutions, {}, self.repo.schema)[0]
@@ -259,72 +251,46 @@
     def user_groups_session(self, *groups):
         """lightweight session using the current user with hi-jacked groups"""
         # use self.session.user.eid to get correct owned_by relation, unless explicit eid
-        u = self.repo._build_user(self.session, self.session.user.eid)
-        u._groups = set(groups)
-        s = Session(u, self.repo)
-        s._tx.cnxset = self.cnxset
-        s._tx.ctx_count = 1
-        # register session to ensure it gets closed
-        self._dumb_sessions.append(s)
-        return s
+        with self.session.new_cnx() as cnx:
+            u = self.repo._build_user(cnx, self.session.user.eid)
+            u._groups = set(groups)
+            s = Session(u, self.repo)
+            return s
 
-    def execute(self, rql, args=None, build_descr=True):
-        return self.o.execute(self.session, rql, args, build_descr)
-
-    def commit(self):
-        self.session.commit()
-        self.session.set_cnxset()
+    def qexecute(self, rql, args=None, build_descr=True):
+        with self.session.new_cnx() as cnx:
+            with cnx.ensure_cnx_set:
+                try:
+                    return self.o.execute(cnx, rql, args, build_descr)
+                finally:
+                    if rql.startswith(('INSERT', 'DELETE', 'SET')):
+                        cnx.commit()
 
 
 class BasePlannerTC(BaseQuerierTC):
-    newsources = ()
 
     def setup(self):
-        clear_cache(self.repo, 'rel_type_sources')
-        clear_cache(self.repo, 'rel_type_sources')
-        clear_cache(self.repo, 'can_cross_relation')
-        clear_cache(self.repo, 'is_multi_sources_relation')
         # XXX source_defs
         self.o = self.repo.querier
         self.session = self.repo._sessions.values()[0]
-        self.cnxset = self.session.set_cnxset()
         self.schema = self.o.schema
-        self.sources = self.o._repo.sources
-        self.system = self.sources[-1]
+        self.system = self.repo.system_source
         do_monkey_patch()
-        self._dumb_sessions = [] # by hi-jacked parent setup
         self.repo.vreg.rqlhelper.backend = 'postgres' # so FTIRANK is considered
-        self.newsources = []
-
-    def add_source(self, sourcecls, uri):
-        source = sourcecls(self.repo, {'uri': uri, 'type': 'whatever'})
-        if not source.copy_based_source:
-            self.sources.append(source)
-        self.newsources.append(source)
-        self.repo.sources_by_uri[uri] = source
-        setattr(self, uri, source)
 
     def tearDown(self):
-        for source in self.newsources:
-            if not source.copy_based_source:
-                self.sources.remove(source)
-            del self.repo.sources_by_uri[source.uri]
         undo_monkey_patch()
-        for session in self._dumb_sessions:
-            if session._tx.cnxset is not None:
-                session._tx.cnxset = None
-            session.close()
 
-    def _prepare_plan(self, rql, kwargs=None):
+    def _prepare_plan(self, cnx, rql, kwargs=None):
         rqlst = self.o.parse(rql, annotate=True)
-        self.o.solutions(self.session, rqlst, kwargs)
+        self.o.solutions(cnx, rqlst, kwargs)
         if rqlst.TYPE == 'select':
             self.repo.vreg.rqlhelper.annotate(rqlst)
             for select in rqlst.children:
                 select.solutions.sort()
         else:
             rqlst.solutions.sort()
-        return self.o.plan_factory(rqlst, kwargs, self.session)
+        return self.o.plan_factory(rqlst, kwargs, cnx)
 
 
 # monkey patch some methods to get predicatable results #######################
 
@@ -350,7 +316,6 @@
 
 from cubicweb.server.querier import ExecutionPlan
 
 _orig_check_permissions = ExecutionPlan._check_permissions
-_orig_init_temp_table = ExecutionPlan.init_temp_table
 
 def _check_permissions(*args, **kwargs):
     res, restricted = _orig_check_permissions(*args, **kwargs)
@@ -360,15 +325,6 @@
 def _dummy_check_permissions(self, rqlst):
     return {(): rqlst.solutions}, set()
 
-def _init_temp_table(self, table, selection, solution):
-    if self.tablesinorder is None:
-        tablesinorder = self.tablesinorder = {}
-    else:
-        tablesinorder = self.tablesinorder
-    if not table in tablesinorder:
-        tablesinorder[table] = 'table%s' % len(tablesinorder)
-    return _orig_init_temp_table(self, table, selection, solution)
-
 from cubicweb.server import rqlannotation
 _orig_select_principal = rqlannotation._select_principal
 
@@ -381,40 +337,6 @@
     return _orig_select_principal(scope, relations,
                                   _sort=lambda rels: sorted(rels, key=sort_key))
 
-try:
-    from cubicweb.server.msplanner import PartPlanInformation
-except ImportError:
-    class PartPlanInformation(object):
-        def merge_input_maps(self, *args, **kwargs):
-            pass
-        def _choose_term(self, sourceterms):
-            pass
-_orig_merge_input_maps = PartPlanInformation.merge_input_maps
-_orig_choose_term = PartPlanInformation._choose_term
-
-def _merge_input_maps(*args, **kwargs):
-    return sorted(_orig_merge_input_maps(*args, **kwargs))
-
-def _choose_term(self, source, sourceterms):
-    # predictable order for test purpose
-    def get_key(x):
-        try:
-            # variable
-            return x.name
-        except AttributeError:
-            try:
-                # relation
-                return x.r_type
-            except AttributeError:
-                # const
-                return x.value
-    return _orig_choose_term(self, source, DumbOrderedDict2(sourceterms, get_key))
-
-from cubicweb.server.sources.pyrorql import PyroRQLSource
-_orig_syntax_tree_search = PyroRQLSource.syntax_tree_search
-
-def _syntax_tree_search(*args, **kwargs):
-    return deepcopy(_orig_syntax_tree_search(*args, **kwargs))
 
 def _ordered_iter_relations(stinfo):
     return sorted(_orig_iter_relations(stinfo), key=lambda x:x.r_type)
@@ -425,17 +347,9 @@
     rqlrewrite.RQLRewriter.build_variantes = _build_variantes
     ExecutionPlan._check_permissions = _check_permissions
     ExecutionPlan.tablesinorder = None
-    ExecutionPlan.init_temp_table = _init_temp_table
-    PartPlanInformation.merge_input_maps = _merge_input_maps
-    PartPlanInformation._choose_term = _choose_term
-    PyroRQLSource.syntax_tree_search = _syntax_tree_search
 
 def undo_monkey_patch():
     rqlrewrite.iter_relations = _orig_iter_relations
     rqlrewrite.RQLRewriter.insert_snippets = _orig_insert_snippets
     rqlrewrite.RQLRewriter.build_variantes = _orig_build_variantes
     ExecutionPlan._check_permissions = _orig_check_permissions
-    ExecutionPlan.init_temp_table = _orig_init_temp_table
-    PartPlanInformation.merge_input_maps = _orig_merge_input_maps
-    PartPlanInformation._choose_term = _orig_choose_term
-    PyroRQLSource.syntax_tree_search = _orig_syntax_tree_search
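The recurring rewrite in these repotest helpers — opening an explicit
connection with `new_cnx()` instead of executing on the session directly — is
the general 3.19 idiom for server-side code. A sketch, given a server-side
`Session` object `session` and a placeholder `maxeid` variable::

    # 3.18:  session.set_cnxset(); session.execute(rql); session.commit()
    # 3.19:  open an explicit connection on the session
    with session.new_cnx() as cnx:
        cnx.execute('DELETE Any X WHERE X eid > %(eid)s', {'eid': maxeid})
        cnx.commit()  # without commit, changes are rolled back when the block exits
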
diff -r 84738d495ffd -r 793377697c81 devtools/test/unittest_dbfill.py
--- a/devtools/test/unittest_dbfill.py Wed Sep 24 17:35:59 2014 +0200
+++ b/devtools/test/unittest_dbfill.py Wed Sep 24 18:04:30 2014 +0200
@@ -86,7 +86,7 @@
         # Test for random index
         for index in range(5):
             cost_value = self.bug_valgen.generate_attribute_value({}, 'cost', index)
-            self.assertTrue(cost_value in range(index+1))
+            self.assertIn(cost_value, range(index+1))
 
     def test_date(self):
         """test date generation"""
diff -r 84738d495ffd -r 793377697c81 devtools/test/unittest_fill.py
--- a/devtools/test/unittest_fill.py Wed Sep 24 17:35:59 2014 +0200
+++ b/devtools/test/unittest_fill.py Wed Sep 24 18:04:30 2014 +0200
@@ -39,31 +39,31 @@
 
     def test_autoextend(self):
-        self.assertFalse('generate_server' in dir(ValueGenerator))
+        self.assertNotIn('generate_server', dir(ValueGenerator))
         class MyValueGenerator(ValueGenerator):
             def generate_server(self, index):
                 return attrname
-        self.assertTrue('generate_server' in dir(ValueGenerator))
+        self.assertIn('generate_server', dir(ValueGenerator))
 
     def test_bad_signature_detection(self):
-        self.assertFalse('generate_server' in dir(ValueGenerator))
+        self.assertNotIn('generate_server', dir(ValueGenerator))
         try:
             class MyValueGenerator(ValueGenerator):
                 def generate_server(self):
                     pass
         except TypeError:
-            self.assertFalse('generate_server' in dir(ValueGenerator))
+            self.assertNotIn('generate_server', dir(ValueGenerator))
         else:
             self.fail('TypeError not raised')
 
     def test_signature_extension(self):
-        self.assertFalse('generate_server' in dir(ValueGenerator))
+        self.assertNotIn('generate_server', dir(ValueGenerator))
         class MyValueGenerator(ValueGenerator):
             def generate_server(self, index, foo):
                 pass
-        self.assertTrue('generate_server' in dir(ValueGenerator))
+        self.assertIn('generate_server', dir(ValueGenerator))
 
 
 if __name__ == '__main__':
diff -r 84738d495ffd -r 793377697c81 devtools/test/unittest_testlib.py
--- a/devtools/test/unittest_testlib.py Wed Sep 24 17:35:59 2014 +0200
+++ b/devtools/test/unittest_testlib.py Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -39,11 +39,13 @@
 
         class MyWebTest(CubicWebTC):
 
             def test_error_view(self):
-                self.request().create_entity('Bug', title=u"bt")
-                self.view('raising', self.execute('Bug B'), template=None)
+                with self.admin_access.web_request() as req:
+                    req.create_entity('Bug', title=u"bt")
+                    self.view('raising', req.execute('Bug B'), template=None, req=req)
 
             def test_correct_view(self):
-                self.view('primary', self.execute('CWUser U'), template=None)
+                with self.admin_access.web_request() as req:
+                    self.view('primary', req.execute('CWUser U'), template=None, req=req)
 
         tests = [MyWebTest('test_error_view'), MyWebTest('test_correct_view')]
         result = self.runner.run(TestSuite(tests))
@@ -98,6 +100,7 @@
 
 class HTMLPageInfoTC(TestCase):
     """test cases for PageInfo"""
+
     def setUp(self):
         parser = htmlparser.HTMLValidator()
         # disable cleanup that would remove doctype
@@ -113,7 +116,6 @@
         parser = htmlparser.DTDValidator()
         self.assertRaises(AssertionError, parser.parse_string, HTML_PAGE_ERROR)
 
-
     def test_has_title_no_level(self):
         """tests h? tags information"""
tags information""" self.assertEqual(self.page_info.has_title('Test'), True) @@ -160,6 +162,7 @@ class CWUtilitiesTC(CubicWebTC): + def test_temporary_permissions_eschema(self): eschema = self.schema['CWUser'] with self.temporary_permissions(CWUser={'read': ()}): @@ -175,11 +178,13 @@ self.assertTrue(rdef.permissions['read'], ()) def test_temporary_appobjects_registered(self): + class AnAppobject(object): __registries__ = ('hip',) __regid__ = 'hop' __select__ = yes() registered = None + @classmethod def __registered__(cls, reg): cls.registered = reg @@ -189,5 +194,63 @@ self.assertIn(AnAppobject, self.vreg['hip']['hop']) self.assertNotIn(AnAppobject, self.vreg['hip']['hop']) + def test_login(self): + """Calling login should not break hook control""" + with self.admin_access.repo_cnx() as cnx: + self.hook_executed = False + self.create_user(cnx, 'babar') + cnx.commit() + + from cubicweb.server import hook + from cubicweb.predicates import is_instance + + class MyHook(hook.Hook): + __regid__ = 'whatever' + __select__ = hook.Hook.__select__ & is_instance('CWProperty') + category = 'test-hook' + events = ('after_add_entity',) + test = self + + def __call__(self): + self.test.hook_executed = True + + with self.new_access('babar').repo_cnx() as cnx: + with self.temporary_appobjects(MyHook): + with cnx.allow_all_hooks_but('test-hook'): + prop = cnx.create_entity('CWProperty', pkey=u'ui.language', value=u'en') + cnx.commit() + self.assertFalse(self.hook_executed) + + +class RepoAccessTC(CubicWebTC): + + def test_repo_connection(self): + acc = self.new_access('admin') + with acc.repo_cnx() as cnx: + rset = cnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + + def test_client_connection(self): + acc = self.new_access('admin') + with acc.client_cnx() as cnx: + rset = cnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + + def test_web_request(self): + acc = self.new_access('admin') + with acc.web_request(elephant='babar') as req: + rset = req.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + self.assertEqual('babar', req.form['elephant']) + + def test_close(self): + acc = self.new_access('admin') + acc.close() + + def test_admin_access(self): + with self.admin_access.client_cnx() as cnx: + self.assertEqual('admin', cnx.user.login) + + if __name__ == '__main__': unittest_main() diff -r 84738d495ffd -r 793377697c81 devtools/test/unittest_webtest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/unittest_webtest.py Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,40 @@ +import httplib + +from logilab.common.testlib import Tags +from cubicweb.devtools.webtest import CubicWebTestTC + + +class CWTTC(CubicWebTestTC): + def test_response(self): + response = self.webapp.get('/') + self.assertEqual(200, response.status_int) + + def test_base_url(self): + if self.config['base-url'] not in self.webapp.get('/').text: + self.fail('no mention of base url in retrieved page') + + +class CWTIdentTC(CubicWebTestTC): + anonymous_allowed = False + tags = CubicWebTestTC.tags | Tags(('auth',)) + + def test_reponse_denied(self): + res = self.webapp.get('/', expect_errors=True) + self.assertEqual(httplib.FORBIDDEN, res.status_int) + + def test_login(self): + res = self.webapp.get('/', expect_errors=True) + self.assertEqual(httplib.FORBIDDEN, res.status_int) + + self.login(self.admlogin, self.admpassword) + res = self.webapp.get('/') + self.assertEqual(httplib.OK, res.status_int) + + self.logout() + res = self.webapp.get('/', expect_errors=True) + 
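The new `RepoAccessTC` tests above double as documentation for the
`new_access`/`admin_access` API. In a cube's test suite the typical pattern
looks like this sketch, where `babar` is an arbitrary login and the query is
illustrative::

    from cubicweb.devtools.testlib import CubicWebTC

    class MyCubeTC(CubicWebTC):
        def test_as_simple_user(self):
            # set up a user with an admin connection, then act as that user
            with self.admin_access.repo_cnx() as cnx:
                self.create_user(cnx, 'babar')  # helper provided by CubicWebTC
                cnx.commit()
            with self.new_access('babar').web_request() as req:
                self.assertTrue(req.execute('Any X WHERE X is CWUser'))
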
self.assertEqual(httplib.FORBIDDEN, res.status_int) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 84738d495ffd -r 793377697c81 devtools/testlib.py --- a/devtools/testlib.py Wed Sep 24 17:35:59 2014 +0200 +++ b/devtools/testlib.py Wed Sep 24 18:04:30 2014 +0200 @@ -39,12 +39,14 @@ from logilab.common.deprecation import deprecated, class_deprecated from logilab.common.shellutils import getlogin -from cubicweb import ValidationError, NoSelectableObject -from cubicweb import cwconfig, dbapi, devtools, web, server +from cubicweb import (ValidationError, NoSelectableObject, AuthenticationError, + ProgrammingError, BadConnectionId) +from cubicweb import cwconfig, devtools, web, server, repoapi from cubicweb.utils import json from cubicweb.sobjects import notification from cubicweb.web import Redirect, application from cubicweb.server.hook import SendMailOp +from cubicweb.server.session import Session from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS from cubicweb.devtools import fake, htmlparser, DEFAULT_EMPTY_DB_ID from cubicweb.utils import json @@ -155,7 +157,7 @@ class TestCaseConnectionProxy(object): - """thin wrapper around `cubicweb.dbapi.Connection` context-manager + """thin wrapper around `cubicweb.repoapi.ClientConnection` context-manager used in CubicWebTC (cf. `cubicweb.devtools.testlib.CubicWebTC.login` method) It just proxies to the default connection context manager but @@ -169,15 +171,106 @@ return getattr(self.cnx, attrname) def __enter__(self): - return self.cnx.__enter__() + # already open + return self.cnx def __exit__(self, exctype, exc, tb): try: return self.cnx.__exit__(exctype, exc, tb) finally: - self.cnx.close() self.testcase.restore_connection() +# RepoAccess utility ########################################################### + +class RepoAccess(object): + """A helper to easily create objects accessing the repo as a specific user + + Each RepoAccess has its own session. + + A repo access can create three types of objects: + + .. automethod:: cubicweb.testlib.RepoAccess.repo_cnx + .. automethod:: cubicweb.testlib.RepoAccess.client_cnx + .. automethod:: cubicweb.testlib.RepoAccess.web_request + + A RepoAccess needs to be closed to destroy the associated Session. + The TestCase usually takes care of this aspect for the user. + + ..
automethod:: cubicweb.testlib.RepoAccess.close + """ + + def __init__(self, repo, login, requestcls): + self._repo = repo + self._login = login + self.requestcls = requestcls + self._session = self._unsafe_connect(login) + + def _unsafe_connect(self, login, **kwargs): + """a completely unsafe connect method for the tests""" + # use an internal connection + with self._repo.internal_cnx() as cnx: + # try to get a user object + user = cnx.find('CWUser', login=login).one() + user.groups + user.properties + user.login + session = Session(user, self._repo) + self._repo._sessions[session.sessionid] = session + user._cw = user.cw_rset.req = session + with session.new_cnx() as cnx: + self._repo.hm.call_hooks('session_open', cnx) + # commit connection at this point in case write operations have been + # done during `session_open` hooks + cnx.commit() + return session + + @contextmanager + def repo_cnx(self): + """Context manager returning a server side connection for the user""" + with self._session.new_cnx() as cnx: + yield cnx + + @contextmanager + def client_cnx(self): + """Context manager returning a client side connection for the user""" + with repoapi.ClientConnection(self._session) as cnx: + yield cnx + + @contextmanager + def web_request(self, url=None, headers={}, method='GET', **kwargs): + """Context manager returning a web request pre-linked to a client cnx + + To commit and rollback use:: + + req.cnx.commit() + req.cnx.rollback() + """ + req = self.requestcls(self._repo.vreg, url=url, headers=headers, + method=method, form=kwargs) + clt_cnx = repoapi.ClientConnection(self._session) + req.set_cnx(clt_cnx) + with clt_cnx: + yield req + + def close(self): + """Close the session associated to the RepoAccess""" + if self._session is not None: + self._repo.close(self._session.sessionid) + self._session = None + + @contextmanager + def shell(self): + from cubicweb.server.migractions import ServerMigrationHelper + with repoapi.ClientConnection(self._session) as cnx: + mih = ServerMigrationHelper(None, repo=self._repo, cnx=cnx, + interactive=False, + # hack so it doesn't try to load fs schema + schema=1) + yield mih + cnx.commit() + + + # base class for cubicweb tests requiring a full cw environment ############### class CubicWebTC(TestCase): @@ -188,7 +281,7 @@ * `vreg`, the vregistry * `schema`, self.vreg.schema * `config`, cubicweb configuration - * `cnx`, dbapi connection to the repository using an admin user + * `cnx`, repoapi connection to the repository using an admin user * `session`, server side session associated to `cnx` * `app`, the cubicweb publisher (for web testing) * `repo`, the repository object @@ -198,21 +291,200 @@ """ appid = 'data' configcls = devtools.ApptestConfiguration - reset_schema = reset_vreg = False # reset schema / vreg between tests + requestcls = fake.FakeRequest tags = TestCase.tags | Tags('cubicweb', 'cw_repo') test_db_id = DEFAULT_EMPTY_DB_ID _cnxs = set() # established connections - _cnx = None # current connection + # connections are kept to ease leak detection + + # anonymous login is allowed by default in cubicweb test cases + anonymous_allowed = True + + def __init__(self, *args, **kwargs): + self._admin_session = None + self._admin_clt_cnx = None + self._current_session = None + self._current_clt_cnx = None + self.repo = None + self._open_access = set() + super(CubicWebTC, self).__init__(*args, **kwargs) + + # repository connection handling ########################################### + + def new_access(self, login): + """provide a new RepoAccess object for a
given user + + The access is automatically closed at the end of the test.""" + access = RepoAccess(self.repo, login, self.requestcls) + self._open_access.add(access) + return access + + def _close_access(self): + while self._open_access: + try: + self._open_access.pop().close() + except BadConnectionId: + continue # already closed + + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def set_cnx(self, cnx): + assert getattr(cnx, '_session', None) is not None + if cnx is self._admin_clt_cnx: + self._pop_custom_cnx() + else: + self._cnxs.add(cnx) # register the cnx to make sure it is removed + self._current_session = cnx._session + self._current_clt_cnx = cnx - # Too much complicated stuff. the class doesn't need to bear the repo anymore - @classmethod - def set_cnx(cls, cnx): - cls._cnxs.add(cnx) - cls._cnx = cnx + @property + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def cnx(self): + # XXX we want to deprecate this + clt_cnx = self._current_clt_cnx + if clt_cnx is None: + clt_cnx = self._admin_clt_cnx + return clt_cnx + + def _close_cnx(self): + """ensure that all cnx used by a test have been closed""" + for cnx in list(self._cnxs): + if cnx._open and not cnx._session.closed: + cnx.rollback() + cnx.close() + self._cnxs.remove(cnx) + + @property + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def session(self): + """return current server side session""" + # XXX We want to use a srv_connection instead and deprecate this + # property + session = self._current_session + if session is None: + session = self._admin_session + # bypassing all sanity to use the same repo cnx in the session + # + # we can't call set_cnx as the Connection is not managed by the + # session. + session._Session__threaddata.cnx = self._admin_clt_cnx._cnx + else: + session._Session__threaddata.cnx = self.cnx._cnx + session.set_cnxset() + return session + + @property + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def websession(self): + return self.session @property - def cnx(self): - return self.__class__._cnx + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def adminsession(self): + """return current server side session (using default manager account)""" + return self._admin_session + + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def login(self, login, **kwargs): + """return a connection for the given login/password""" + __ = kwargs.pop('autoclose', True) # not used anymore + if login == self.admlogin: + # undo any previous login, if we're not used as a context manager + self.restore_connection() + return self.cnx + else: + if not kwargs: + kwargs['password'] = str(login) + clt_cnx = repoapi.connect(self.repo, login, **kwargs) + self.set_cnx(clt_cnx) + clt_cnx.__enter__() + return TestCaseConnectionProxy(self, clt_cnx) + + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def restore_connection(self): + self._pop_custom_cnx() + + def _pop_custom_cnx(self): + if self._current_clt_cnx is not None: + if self._current_clt_cnx._open: + self._current_clt_cnx.close() + if not self._current_session.closed: + self.repo.close(self._current_session.sessionid) + self._current_clt_cnx = None + self._current_session = None + + #XXX this doesn't need to a be classmethod anymore + def _init_repo(self): + """init the repository and connection to it. + """ + # get or restore and working db. 
+ db_handler = devtools.get_test_db_handler(self.config, self.init_config) + db_handler.build_db_cache(self.test_db_id, self.pre_setup_database) + db_handler.restore_database(self.test_db_id) + self.repo = db_handler.get_repo(startup=True) + # get an admin session (without actual login) + login = unicode(db_handler.config.default_admin_config['login']) + self.admin_access = self.new_access(login) + self._admin_session = self.admin_access._session + self._admin_clt_cnx = repoapi.ClientConnection(self._admin_session) + self._cnxs.add(self._admin_clt_cnx) + self._admin_clt_cnx.__enter__() + self.config.repository = lambda x=None: self.repo + + # db api ################################################################## + + @nocoverage + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def cursor(self, req=None): + if req is not None: + return req.cnx + else: + return self.cnx + + @nocoverage + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def execute(self, rql, args=None, req=None): + """executes , builds a resultset, and returns a couple (rset, req) + where req is a FakeRequest + """ + req = req or self.request(rql=rql) + return req.execute(unicode(rql), args) + + @nocoverage + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def commit(self): + try: + return self.cnx.commit() + finally: + self.session.set_cnxset() # ensure cnxset still set after commit + + @nocoverage + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def rollback(self): + try: + self.cnx.rollback() + except ProgrammingError: + pass # connection closed + finally: + self.session.set_cnxset() # ensure cnxset still set after commit + + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def request(self, rollbackfirst=False, url=None, headers={}, **kwargs): + """return a web ui request""" + if rollbackfirst: + self.cnx.rollback() + req = self.requestcls(self.vreg, url=url, headers=headers, form=kwargs) + req.set_cnx(self.cnx) + return req + + # server side db api ####################################################### + + @deprecated('[3.19] explicitly use RepoAccess object in test instead') + def sexecute(self, rql, args=None): + self.session.set_cnxset() + return self.session.execute(rql, args) + + + # config management ######################################################## @classproperty def config(cls): @@ -229,15 +501,19 @@ config.mode = 'test' return config - @classmethod + @classmethod # XXX could be turned into a regular method def init_config(cls, config): """configuration initialization hooks. You may only want to override here the configuraton logic. Otherwise, consider to use a different :class:`ApptestConfiguration` - defined in the `configcls` class attribute""" - source = config.sources()['system'] + defined in the `configcls` class attribute. + + This method will be called by the database handler once the config has + been properly bootstrapped. + """ + source = config.system_source_config cls.admlogin = unicode(source['db-user']) cls.admpassword = source['db-password'] # uncomment the line below if you want rql queries to be logged @@ -259,33 +535,12 @@ config.global_set_option('embed-allowed', re.compile('.*')) except Exception: # not in server only configuration pass - - #XXX this doesn't need to a be classmethod anymore - @classmethod - def _init_repo(cls): - """init the repository and connection to it. 
- """ - # setup configuration for test - cls.init_config(cls.config) - # get or restore and working db. - db_handler = devtools.get_test_db_handler(cls.config) - db_handler.build_db_cache(cls.test_db_id, cls.pre_setup_database) + config.set_anonymous_allowed(cls.anonymous_allowed) - cls.repo, cnx = db_handler.get_repo_and_cnx(cls.test_db_id) - # no direct assignation to cls.cnx anymore. - # cnx is now an instance property that use a class protected attributes. - cls.set_cnx(cnx) - cls.vreg = cls.repo.vreg - cls.websession = dbapi.DBAPISession(cnx, cls.admlogin) - cls._orig_cnx = (cnx, cls.websession) - cls.config.repository = lambda x=None: cls.repo + @property + def vreg(self): + return self.repo.vreg - def _close_cnx(self): - for cnx in list(self._cnxs): - if not cnx._closed: - cnx.rollback() - cnx.close() - self._cnxs.remove(cnx) # global resources accessors ############################################### @@ -294,18 +549,7 @@ """return the application schema""" return self.vreg.schema - @property - def session(self): - """return current server side session (using default manager account)""" - session = self.repo._sessions[self.cnx.sessionid] - session.set_cnxset() - return session - - @property - def adminsession(self): - """return current server side session (using default manager account)""" - return self.repo._sessions[self._orig_cnx[0].sessionid] - + @deprecated('[3.19] explicitly use RepoAccess object in test instead') def shell(self): """return a shell session object""" from cubicweb.server.migractions import ServerMigrationHelper @@ -338,13 +582,22 @@ except Exception as ex: self.__class__._repo_init_failed = ex raise + self.addCleanup(self._close_access) resume_tracing() self.setup_database() - self.commit() + self._admin_clt_cnx.commit() MAILBOX[:] = [] # reset mailbox def tearDown(self): # XXX hack until logilab.common.testlib is fixed + if self._admin_clt_cnx is not None: + if self._admin_clt_cnx._open: + self._admin_clt_cnx.close() + self._admin_clt_cnx = None + if self._admin_session is not None: + if not self._admin_session.closed: + self.repo.close(self._admin_session.sessionid) + self._admin_session = None while self._cleanups: cleanup, args, kwargs = self._cleanups.pop(-1) cleanup(*args, **kwargs) @@ -361,7 +614,7 @@ """add your database setup code by overriding this method""" @classmethod - def pre_setup_database(cls, session, config): + def pre_setup_database(cls, cnx, config): """add your pre database setup code by overriding this method Do not forget to set the cls.test_db_id value to enable caching of the @@ -370,11 +623,11 @@ # user / session management ############################################### + @deprecated('[3.19] explicitly use RepoAccess object in test instead') def user(self, req=None): """return the application schema""" if req is None: - req = self.request() - return self.cnx.user(req) + return self.request().user else: return req.user @@ -392,7 +645,7 @@ groups = login login = req assert not isinstance(self, type) - req = self._orig_cnx[0].request() + req = self._admin_clt_cnx if password is None: password = login.encode('utf8') user = req.create_entity('CWUser', login=unicode(login), @@ -411,65 +664,6 @@ req.cnx.commit() return user - def login(self, login, **kwargs): - """return a connection for the given login/password""" - if login == self.admlogin: - self.restore_connection() - # definitly don't want autoclose when used as a context manager - return self.cnx - autoclose = kwargs.pop('autoclose', True) - if not kwargs: - kwargs['password'] = 
str(login) - self.set_cnx(dbapi._repo_connect(self.repo, unicode(login), **kwargs)) - self.websession = dbapi.DBAPISession(self.cnx) - if login == self.vreg.config.anonymous_user()[0]: - self.cnx.anonymous_connection = True - if autoclose: - return TestCaseConnectionProxy(self, self.cnx) - return self.cnx - - def restore_connection(self): - if not self.cnx is self._orig_cnx[0]: - if not self.cnx._closed: - self.cnx.close() - cnx, self.websession = self._orig_cnx - self.set_cnx(cnx) - - # db api ################################################################## - - @nocoverage - def cursor(self, req=None): - return self.cnx.cursor(req or self.request()) - - @nocoverage - def execute(self, rql, args=None, req=None): - """executes , builds a resultset, and returns a couple (rset, req) - where req is a FakeRequest - """ - req = req or self.request(rql=rql) - return req.execute(unicode(rql), args) - - @nocoverage - def commit(self): - try: - return self.cnx.commit() - finally: - self.session.set_cnxset() # ensure cnxset still set after commit - - @nocoverage - def rollback(self): - try: - self.cnx.rollback() - except dbapi.ProgrammingError: - pass # connection closed - finally: - self.session.set_cnxset() # ensure cnxset still set after commit - - # server side db api ####################################################### - - def sexecute(self, rql, args=None): - self.session.set_cnxset() - return self.session.execute(rql, args) # other utilities ######################################################### @@ -507,14 +701,13 @@ .. sourcecode:: python - rdef = self.schema['CWUser'].rdef('login') with self.temporary_permissions(CWUser={'read': ()}): ... - Usually the former will be prefered to override permissions on a + Usually the former will be preferred to override permissions on a relation definition, while the latter is well suited for entity types. - The allowed keys in the permission dictionary depends on the schema type + The allowed keys in the permission dictionary depend on the schema type (entity type / relation definition). Resulting permissions will be similar to `orig_permissions.update(partial_perms)`. 
""" @@ -634,12 +827,13 @@ def list_startup_views(self): """returns the list of startup views""" - req = self.request() - for view in self.vreg['views'].possible_views(req, None): - if view.category == 'startupview': - yield view.__regid__ - else: - not_selected(self.vreg, view) + with self.admin_access.web_request() as req: + for view in self.vreg['views'].possible_views(req, None): + if view.category == 'startupview': + yield view.__regid__ + else: + not_selected(self.vreg, view) + # web ui testing utilities ################################################# @@ -647,21 +841,13 @@ @cached def app(self): """return a cubicweb publisher""" - publisher = application.CubicWebPublisher(self.config, vreg=self.vreg) + publisher = application.CubicWebPublisher(self.repo, self.config) def raise_error_handler(*args, **kwargs): raise publisher.error_handler = raise_error_handler return publisher - requestcls = fake.FakeRequest - def request(self, rollbackfirst=False, url=None, headers={}, **kwargs): - """return a web ui request""" - req = self.requestcls(self.vreg, url=url, headers=headers, form=kwargs) - if rollbackfirst: - self.websession.cnx.rollback() - req.set_session(self.websession) - return req - + @deprecated('[3.19] use the .remote_calling method') def remote_call(self, fname, *args): """remote json call simulation""" dump = json.dumps @@ -670,6 +856,14 @@ ctrl = self.vreg['controllers'].select('ajax', req) return ctrl.publish(), req + @contextmanager + def remote_calling(self, fname, *args): + """remote json call simulation""" + args = [json.dumps(arg) for arg in args] + with self.admin_access.web_request(fname=fname, pageid='123', arg=args) as req: + ctrl = self.vreg['controllers'].select('ajax', req) + yield ctrl.publish(), req + def app_handle_request(self, req, path='view'): return self.app.core_handle(req, path) @@ -689,6 +883,7 @@ raise return result + @deprecated('[3.19] use .admin_request_from_url instead') def req_from_url(self, url): """parses `url` and builds the corresponding CW-web request @@ -702,6 +897,20 @@ req.setup_params(params) return req + @contextmanager + def admin_request_from_url(self, url): + """parses `url` and builds the corresponding CW-web request + + req.form will be setup using the url's query string + """ + with self.admin_access.web_request(url=url) as req: + if isinstance(url, unicode): + url = url.encode(req.encoding) # req.setup_params() expects encoded strings + querystring = urlparse.urlparse(url)[-2] + params = urlparse.parse_qs(querystring) + req.setup_params(params) + yield req + def url_publish(self, url, data=None): """takes `url`, uses application's app_resolver to find the appropriate controller and result set, then publishes the result. @@ -712,22 +921,22 @@ This should pretty much correspond to what occurs in a real CW server except the apache-rewriter component is not called. """ - req = self.req_from_url(url) - if data is not None: - req.form.update(data) - ctrlid, rset = self.app.url_resolver.process(req, req.relative_path(False)) - return self.ctrl_publish(req, ctrlid, rset) + with self.admin_request_from_url(url) as req: + if data is not None: + req.form.update(data) + ctrlid, rset = self.app.url_resolver.process(req, req.relative_path(False)) + return self.ctrl_publish(req, ctrlid, rset) def http_publish(self, url, data=None): """like `url_publish`, except this returns a http response, even in case of errors. You may give form parameters using the `data` argument. 
""" - req = self.req_from_url(url) - if data is not None: - req.form.update(data) - with real_error_handling(self.app): - result = self.app_handle_request(req, req.relative_path(False)) - return result, req + with self.admin_request_from_url(url) as req: + if data is not None: + req.form.update(data) + with real_error_handling(self.app): + result = self.app_handle_request(req, req.relative_path(False)) + return result, req @staticmethod def _parse_location(req, location): @@ -779,33 +988,29 @@ def init_authentication(self, authmode, anonuser=None): self.set_auth_mode(authmode, anonuser) - req = self.request(url='login') - origsession = req.session - req.session = req.cnx = None - del req.execute # get back to class implementation + req = self.requestcls(self.vreg, url='login') sh = self.app.session_handler authm = sh.session_manager.authmanager authm.anoninfo = self.vreg.config.anonymous_user() authm.anoninfo = authm.anoninfo[0], {'password': authm.anoninfo[1]} # not properly cleaned between tests self.open_sessions = sh.session_manager._sessions = {} - return req, origsession + return req, self.session def assertAuthSuccess(self, req, origsession, nbsessions=1): sh = self.app.session_handler - self.app.connect(req) - session = req.session + session = self.app.get_session(req) + clt_cnx = repoapi.ClientConnection(session) + req.set_cnx(clt_cnx) self.assertEqual(len(self.open_sessions), nbsessions, self.open_sessions) self.assertEqual(session.login, origsession.login) self.assertEqual(session.anonymous_session, False) def assertAuthFailure(self, req, nbsessions=0): - self.app.connect(req) - self.assertIsInstance(req.session, dbapi.DBAPISession) - self.assertEqual(req.session.cnx, None) - self.assertIsInstance(req.cnx, (dbapi._NeedAuthAccessMock, NoneType)) - # + 1 since we should still have session without connection set - self.assertEqual(len(self.open_sessions), nbsessions + 1) + with self.assertRaises(AuthenticationError): + self.app.get_session(req) + # +0 since we do not track the opened session + self.assertEqual(len(self.open_sessions), nbsessions) clear_cache(req, 'get_authorization') # content validation ####################################################### @@ -1002,7 +1207,7 @@ # XXX cleanup unprotected_entities & all mess -def how_many_dict(schema, cursor, how_many, skip): +def how_many_dict(schema, cnx, how_many, skip): """given a schema, compute how many entities by type we need to be able to satisfy relations cardinality. @@ -1036,7 +1241,7 @@ # step 1, compute a base number of each entity types: number of already # existing entities of this type + `how_many` for etype in unprotected_entities(schema, strict=True): - howmanydict[str(etype)] = cursor.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0] + howmanydict[str(etype)] = cnx.execute('Any COUNT(X) WHERE X is %s' % etype)[0][0] if etype in unprotected: howmanydict[str(etype)] += how_many # step 2, augment nb entity per types to satisfy cardinality constraints, @@ -1070,10 +1275,10 @@ def to_test_etypes(self): return unprotected_entities(self.schema, strict=True) - def custom_populate(self, how_many, cursor): + def custom_populate(self, how_many, cnx): pass - def post_populate(self, cursor): + def post_populate(self, cnx): pass @@ -1082,77 +1287,79 @@ """this method populates the database with `how_many` entities of each possible type. 
It also inserts random relations between them """ - with self.session.security_enabled(read=False, write=False): - self._auto_populate(how_many) + with self.admin_access.repo_cnx() as cnx: + with cnx.security_enabled(read=False, write=False): + self._auto_populate(cnx, how_many) + cnx.commit() - def _auto_populate(self, how_many): - cu = self.cursor() - self.custom_populate(how_many, cu) + def _auto_populate(self, cnx, how_many): + self.custom_populate(how_many, cnx) vreg = self.vreg - howmanydict = how_many_dict(self.schema, cu, how_many, self.no_auto_populate) + howmanydict = how_many_dict(self.schema, cnx, how_many, self.no_auto_populate) for etype in unprotected_entities(self.schema): if etype in self.no_auto_populate: continue nb = howmanydict.get(etype, how_many) for rql, args in insert_entity_queries(etype, self.schema, vreg, nb): - cu.execute(rql, args) + cnx.execute(rql, args) edict = {} for etype in unprotected_entities(self.schema, strict=True): - rset = cu.execute('%s X' % etype) + rset = cnx.execute('%s X' % etype) edict[str(etype)] = set(row[0] for row in rset.rows) existingrels = {} ignored_relations = SYSTEM_RELATIONS | self.ignored_relations for rschema in self.schema.relations(): if rschema.final or rschema in ignored_relations: continue - rset = cu.execute('DISTINCT Any X,Y WHERE X %s Y' % rschema) + rset = cnx.execute('DISTINCT Any X,Y WHERE X %s Y' % rschema) existingrels.setdefault(rschema.type, set()).update((x, y) for x, y in rset) - q = make_relations_queries(self.schema, edict, cu, ignored_relations, + q = make_relations_queries(self.schema, edict, cnx, ignored_relations, existingrels=existingrels) for rql, args in q: try: - cu.execute(rql, args) + cnx.execute(rql, args) except ValidationError as ex: # failed to satisfy some constraint print 'error in automatic db population', ex - self.session.commit_state = None # reset uncommitable flag - self.post_populate(cu) - self.commit() + cnx.commit_state = None # reset uncommitable flag + self.post_populate(cnx) def iter_individual_rsets(self, etypes=None, limit=None): etypes = etypes or self.to_test_etypes() - for etype in etypes: - if limit: - rql = 'Any X LIMIT %s WHERE X is %s' % (limit, etype) - else: - rql = 'Any X WHERE X is %s' % etype - rset = self.execute(rql) - for row in xrange(len(rset)): - if limit and row > limit: - break - # XXX iirk - rset2 = rset.limit(limit=1, offset=row) - yield rset2 + with self.admin_access.web_request() as req: + for etype in etypes: + if limit: + rql = 'Any X LIMIT %s WHERE X is %s' % (limit, etype) + else: + rql = 'Any X WHERE X is %s' % etype + rset = req.execute(rql) + for row in xrange(len(rset)): + if limit and row > limit: + break + # XXX iirk + rset2 = rset.limit(limit=1, offset=row) + yield rset2 def iter_automatic_rsets(self, limit=10): """generates basic resultsets for each entity type""" etypes = self.to_test_etypes() if not etypes: return - for etype in etypes: - yield self.execute('Any X LIMIT %s WHERE X is %s' % (limit, etype)) - etype1 = etypes.pop() - try: - etype2 = etypes.pop() - except KeyError: - etype2 = etype1 - # test a mixed query (DISTINCT/GROUP to avoid getting duplicate - # X which make muledit view failing for instance (html validation fails - # because of some duplicate "id" attributes) - yield self.execute('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is %s, Y is %s' % (etype1, etype2)) - # test some application-specific queries if defined - for rql in self.application_rql: - yield self.execute(rql) + with self.admin_access.web_request() as req: + for 
etype in etypes: + yield req.execute('Any X LIMIT %s WHERE X is %s' % (limit, etype)) + etype1 = etypes.pop() + try: + etype2 = etypes.pop() + except KeyError: + etype2 = etype1 + # test a mixed query (DISTINCT/GROUP to avoid getting duplicate + # X which make muledit view failing for instance (html validation fails + # because of some duplicate "id" attributes) + yield req.execute('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is %s, Y is %s' % (etype1, etype2)) + # test some application-specific queries if defined + for rql in self.application_rql: + yield req.execute(rql) def _test_everything_for(self, rset): """this method tries to find everything that can be tested @@ -1214,8 +1421,8 @@ ## startup views def test_startup_views(self): for vid in self.list_startup_views(): - req = self.request() - yield self.view, vid, None, req + with self.admin_access.web_request() as req: + yield self.view, vid, None, req # registry instrumentization ################################################### diff -r 84738d495ffd -r 793377697c81 devtools/webtest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/webtest.py Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,36 @@ +from __future__ import absolute_import + +import webtest + +from cubicweb.wsgi import handler +from cubicweb.devtools.testlib import CubicWebTC + + +class CubicWebTestTC(CubicWebTC): + @classmethod + def init_config(cls, config): + super(CubicWebTestTC, cls).init_config(config) + config.global_set_option('base-url', 'http://localhost.local/') + + def setUp(self): + super(CubicWebTestTC, self).setUp() + webapp = handler.CubicWebWSGIApplication(self.config) + self.webapp = webtest.TestApp(webapp) + + def tearDown(self): + del self.webapp + super(CubicWebTestTC, self).tearDown() + + def login(self, user=None, password=None, **args): + if user is None: + user = self.admlogin + if password is None: + password = self.admpassword if user == self.admlogin else user + args.update({ + '__login': user, + '__password': password + }) + return self.webapp.get('/login', args) + + def logout(self): + return self.webapp.get('/logout') diff -r 84738d495ffd -r 793377697c81 doc/3.19.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/3.19.rst Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,180 @@ +What's new in CubicWeb 3.19? +============================ + +New functionalities +-------------------- + +* implement Cross Origin Resource Sharing (CORS) + (see `#2491768 `_) + +* system_source.create_eid can get a range of IDs, to reduce overhead of batch + entity creation + +Behaviour Changes +----------------- + +* The anonymous property of Session and Connection are now computed from the + related user login. If it matches the ``anonymous-user`` in the config the + connection is anonymous. Beware that the ``anonymous-user`` config is web + specific. Therefore, no session may be anonymous in a repository only setup. + + +New Repository Access API +------------------------- + +Connection replaces Session +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A new explicit Connection object replaces Session as the main repository entry +point. Connection holds all the necessary methods to be used server-side +(``execute``, ``commit``, ``rollback``, ``call_service``, ``entity_from_eid``, +etc...). One obtains a new Connection object using ``session.new_cnx()``. +Connection objects need to have an explicit begin and end. 
Use them as a context +manager to never miss an end:: + + with session.new_cnx() as cnx: + cnx.execute('INSERT Elephant E, E name "Babar"') + cnx.commit() + cnx.execute('INSERT Elephant E, E name "Celeste"') + cnx.commit() + # Once you get out of the "with" clause, the connection is closed. + +Using the same Connection object in multiple threads will give you access to the +same Transaction. However, Connection objects are not thread safe (use them that +way at your own risk). + +``repository.internal_session`` is deprecated in favor of +``repository.internal_cnx``. Note that internal connections are now `safe` by default, +i.e. the integrity hooks are enabled. + +Backward compatibility is preserved on Session. + + +dbapi vs repoapi +~~~~~~~~~~~~~~~~ + +A new API has been introduced to replace the dbapi. It is called `repoapi`. + +There are three relevant functions for now: + +* ``repoapi.get_repository`` returns a Repository object either from an + URI when used as ``repoapi.get_repository(uri)`` or from a config + when used as ``repoapi.get_repository(config=config)``. + +* ``repoapi.connect(repo, login, **credentials)`` returns a ClientConnection + associated with the user identified by the credentials. The + ClientConnection is associated with its own Session that is closed + when the ClientConnection is closed. A ClientConnection is a + Connection-like object to be used client side. + +* ``repoapi.anonymous_cnx(repo)`` returns a ClientConnection associated + with the anonymous user if described in the config. + + +repoapi.ClientConnection replaces dbapi.Connection and company +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +On the client/web side, the Request now uses a ``repoapi.ClientConnection`` +instead of a ``dbapi.connection``. The ``ClientConnection`` has multiple backward +compatible methods to make it look like a ``dbapi.Cursor`` and ``dbapi.Connection``. + +Sessions used on the web side are now the same as the ones used server side. +Some backward compatibility methods have been installed on the server side Session +to ease the transition. + +The authentication stack has been altered to use the ``repoapi`` instead of +the ``dbapi``. Cubes adding new elements to this stack are likely to break. + +Session data can be accessed using the cnx.data dictionary, while +transaction data is available through cnx.transaction_data. These +replace the [gs]et_shared_data methods and their optional txid kwarg. + +New API in tests +~~~~~~~~~~~~~~~~ + +All current methods and attributes used to access the repo on ``CubicWebTC`` are +deprecated. You may now use a ``RepoAccess`` object. A ``RepoAccess`` object is +linked to a new ``Session`` for a specified user. It is able to create +``Connection``, ``ClientConnection`` and web side requests linked to this +session:: + + access = self.new_access('babar') # create a new RepoAccess for user babar + with access.repo_cnx() as cnx: + # some work with server side cnx + cnx.execute(...) + cnx.commit() + cnx.execute(...) + cnx.commit() + + with access.client_cnx() as cnx: + # some work with client side cnx + cnx.execute(...) + cnx.commit() + + with access.web_request(elephant='babar') as req: + # some work with a web request + elephant_name = req.form['elephant'] + req.execute(...) + req.cnx.commit() + +By default ``testcase.admin_access`` contains a ``RepoAccess`` object for the +default admin session. + + +API changes +----------- + +* ``RepositorySessionManager.postlogin`` is now called with two arguments, + request and session. 
And this now happens before the + session is linked to the + request. + +* ``SessionManager`` and ``AuthenticationManager`` now take a repo object at + initialization time instead of a vreg. + +* The ``async`` argument of ``_cw.call_service`` has been dropped. All calls are + now synchronous. The zmq notification bus looks like a good replacement for + most async use cases. + +* ``repo.stats()`` is now deprecated. The same information is available through + a service (``_cw.call_service('repo_stats')``). + +* ``repo.gc_stats()`` is now deprecated. The same information is available through + a service (``_cw.call_service('repo_gc_stats')``). + +* ``repo.register_user()`` is now deprecated. The functionality is now + available through a service (``_cw.call_service('register_user')``). + +* ``request.set_session`` no longer takes an optional ``user`` argument. + +* ``CubicWebTC`` does not have repo and cnx as class attributes anymore. They are + standard instance attributes. The ``set_cnx`` and ``_init_repo`` class methods + become instance methods. + +* ``set_cnxset`` and ``free_cnxset`` are deprecated. cnxsets are now + automatically managed. + +* The implementation of cascading deletion when deleting `composite` + entities has changed. This comes with a semantic change: merely deleting + a composite relation no longer entails the deletion of the + component side of the relation. + +* ``_cw.user_callback`` and ``_cw.user_rql_callback`` are deprecated. Users + are encouraged to write an actual controller (e.g. using ``ajaxfunc``) + instead of storing a closure in the session data. + +* A new ``entity.cw_linkable_rql`` method provides the rql to fetch all entities + that are already or may be related to the current entity using the given + relation. + + +Deprecated Code Drops +---------------------- + +* the session.hijack_user mechanism has been dropped. + +* EtypeRestrictionComponent has been removed; its functionality was + replaced by facets a while ago. + +* the old multi-source support has been removed. Only copy-based sources + remain, such as datafeed or ldapfeed. + diff -r 84738d495ffd -r 793377697c81 doc/book/en/admin/cubicweb-ctl.rst --- a/doc/book/en/admin/cubicweb-ctl.rst Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/book/en/admin/cubicweb-ctl.rst Wed Sep 24 18:04:30 2014 +0200 @@ -37,7 +37,7 @@ cubicweb-ctl newcube This will create a new cube in -``/path/to/forest/cubicweb/cubes/`` for a Mercurial forest +``/path/to/grshell-cubicweb/cubes/`` for a Mercurial installation, or in ``/usr/share/cubicweb/cubes`` for a Debian package installation. diff -r 84738d495ffd -r 793377697c81 doc/book/en/admin/instance-config.rst --- a/doc/book/en/admin/instance-config.rst Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/book/en/admin/instance-config.rst Wed Sep 24 18:04:30 2014 +0200 @@ -189,3 +189,38 @@ :`navigation.combobox-limit`: number of unrelated entities to show up in the drop-down lists of an entity editing view + +Cross-Origin Resource Sharing +----------------------------- + +CubicWeb provides some support for the CORS_ protocol. For now, the +provided implementation only deals with access to a CubicWeb instance +as a whole. Support for a finer granularity may be considered in the +future. 
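For instance, assuming the option names documented below, an instance that should
only be queried from pages served by a single trusted domain might set something
like this in its configuration file (a sketch with illustrative values only)::

    # illustrative CORS settings
    access-control-allow-origin=trusted.example.org
    access-control-allow-methods=GET, POST
    access-control-max-age=3600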
+ +Specificities of the provided implementation: + +- ``Access-Control-Allow-Credentials`` is always true +- ``Access-Control-Allow-Origin`` header in response will never be + ``*`` +- ``Access-Control-Expose-Headers`` can be configured globally (see below) +- ``Access-Control-Max-Age`` can be configured globally (see below) +- ``Access-Control-Allow-Methods`` can be configured globally (see below) +- ``Access-Control-Allow-Headers`` can be configured globally (see below) + + +A few parameters can be set to configure the CORS_ capabilities of CubicWeb. + +.. _CORS: http://www.w3.org/TR/cors/ + +:`access-control-allow-origin`: + comma-separated list of allowed origin domains or "*" for any domain +:`access-control-allow-methods`: + comma-separated list of allowed HTTP methods +:`access-control-max-age`: + maximum age of cross-origin resource sharing (in seconds) +:`access-control-allow-headers`: + comma-separated list of allowed HTTP custom headers (used in preflight requests) +:`access-control-expose-headers`: + comma-separated list of HTTP custom headers exposed to scripts (used in simple requests) + diff -r 84738d495ffd -r 793377697c81 doc/book/en/admin/ldap.rst --- a/doc/book/en/admin/ldap.rst Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/book/en/admin/ldap.rst Wed Sep 24 18:04:30 2014 +0200 @@ -85,7 +85,9 @@ If the LDAP server accepts anonymous binds, then it is possible to leave data-cnx-dn and data-cnx-password empty. This is, however, quite -unlikely in practice. +unlikely in practice. Beware that the LDAP server might hide attributes +such as "userPassword" while the rest of the attributes remain visible +through an anonymous binding. LDAP schema mapping options: diff -r 84738d495ffd -r 793377697c81 doc/book/en/admin/setup-windows.rst --- a/doc/book/en/admin/setup-windows.rst Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/book/en/admin/setup-windows.rst Wed Sep 24 18:04:30 2014 +0200 @@ -23,12 +23,12 @@ |cubicweb| requires some base elements that must be installed to run correctly. So, first of all, you must install them: -* python >= 2.5 and < 3 +* python >= 2.6 and < 3 (`Download Python `_). You can also consider the Python(x,y) distribution (`Download Python(x,y) `_) as it makes things easier for Windows users by wrapping in a single installer - python 2.5 plus numerous useful third-party modules and + python 2.7 plus numerous useful third-party modules and applications (including Eclipse + pydev, which is an arguably good IDE for Python under Windows). @@ -40,7 +40,7 @@ (version >=2.2.1) allows working with XML and HTML (`Download lxml `_) -* `Postgresql 8.4 `_, +* `Postgresql `_, an object-relational database system (`Download Postgresql `_) and its python drivers @@ -50,8 +50,7 @@ (`Download gettext `_). * `rql `_, - the recent version of the Relationship Query Language parser - (`Download rql `_). + the recent version of the Relationship Query Language parser. Install optional elements ------------------------- We recommend you install the following elements. They are not mandatory but they activate very interesting features in |cubicweb|: -* `Simplejson `_ - must be installed if you have python <= 2.5 - (`Download simplejson `_). - It is included in the Standard library from Python >= 2.6. - -* `Pyro `_ - enables remote access to cubicweb repository instances. - It also allows the client and the server not running on the same machine - (`Download Pyro `_). - * `python-ldap `_ provides access to LDAP/Active directory directories (`Download python-ldap `_). 
diff -r 84738d495ffd -r 793377697c81 doc/book/en/annexes/depends.rst --- a/doc/book/en/annexes/depends.rst Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/book/en/annexes/depends.rst Wed Sep 24 18:04:30 2014 +0200 @@ -6,7 +6,7 @@ ========================= When you run CubicWeb from source, either by downloading the tarball or -cloning the mercurial forest, here is the list of tools and libraries you need +cloning the mercurial tree, here is the list of tools and libraries you need to have installed in order for CubicWeb to work: * yapps - http://theory.stanford.edu/~amitp/yapps/ - @@ -15,61 +15,43 @@ * pygraphviz - http://networkx.lanl.gov/pygraphviz/ - http://pypi.python.org/pypi/pygraphviz -* simplejson - http://code.google.com/p/simplejson/ - - http://pypi.python.org/pypi/simplejson - -* docsutils - http://docutils.sourceforge.net/ - http://pypi.python.org/pypi/docutils +* docutils - http://docutils.sourceforge.net/ - http://pypi.python.org/pypi/docutils * lxml - http://codespeak.net/lxml - http://pypi.python.org/pypi/lxml * twisted - http://twistedmatrix.com/ - http://pypi.python.org/pypi/Twisted * logilab-common - http://www.logilab.org/project/logilab-common - - http://pypi.python.org/pypi/logilab-common/ - included in the forest + http://pypi.python.org/pypi/logilab-common/ * logilab-database - http://www.logilab.org/project/logilab-database - - http://pypi.python.org/pypi/logilab-database/ - included in the forest + http://pypi.python.org/pypi/logilab-database/ * logilab-constraint - http://www.logilab.org/project/logilab-constraint - - http://pypi.python.org/pypi/constraint/ - included in the forest + http://pypi.python.org/pypi/constraint/ * logilab-mtconverter - http://www.logilab.org/project/logilab-mtconverter - - http://pypi.python.org/pypi/logilab-mtconverter - included in the forest + http://pypi.python.org/pypi/logilab-mtconverter -* rql - http://www.logilab.org/project/rql - http://pypi.python.org/pypi/rql - - included in the forest +* rql - http://www.logilab.org/project/rql - http://pypi.python.org/pypi/rql * yams - http://www.logilab.org/project/yams - http://pypi.python.org/pypi/yams - - included in the forest * indexer - http://www.logilab.org/project/indexer - - http://pypi.python.org/pypi/indexer - included in the forest + http://pypi.python.org/pypi/indexer * passlib - https://code.google.com/p/passlib/ - http://pypi.python.org/pypi/passlib -To use network communication between cubicweb instances / clients: - -* Pyro - http://www.xs4all.nl/~irmen/pyro3/ - http://pypi.python.org/pypi/Pyro - -If you're using a Postgres database (recommended): +If you're using a Postgresql database (recommended): * psycopg2 - http://initd.org/projects/psycopg2 - http://pypi.python.org/pypi/psycopg2 * plpythonu extension -* tsearch2 extension (for postgres < 8.3, in postgres-contrib) Other optional packages: * fyzz - http://www.logilab.org/project/fyzz - - http://pypi.python.org/pypi/fyzz - included in the forest, *to activate Sparql querying* - -For the google-appengine extension to be available, you also need: - -* vobject - http://vobject.skyhouseconsulting.com/ - - http://pypi.python.org/pypi/vobject, *for the icalendar view*. For those not - benefiting from a packaging system, note that vobject itself depends on - dateutil - http://labix.org/python-dateutil - - http://pypi.python.org/pypi/python-dateutil/. 
+ http://pypi.python.org/pypi/fyzz *to activate Sparql querying* Any help with the packaging of CubicWeb for more than Debian/Ubuntu (including diff -r 84738d495ffd -r 793377697c81 doc/book/en/annexes/mercurial.rst --- a/doc/book/en/annexes/mercurial.rst Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/book/en/annexes/mercurial.rst Wed Sep 24 18:04:30 2014 +0200 @@ -18,13 +18,13 @@ .. _Mercurial: http://www.selenic.com/mercurial/ -In contrast to CVS/Subversion, we usually create a repository by +In contrast to CVS/Subversion, we usually create a repository per project to manage. In a collaborative development, we usually create a central repository accessible to all developers of the project. These central repository is used -as a reference. According to its needs, then everyone can have a local repository, -that you will have to synchronize with the central repository from time to time. +as a reference. According to their needs, everyone can have a local repository, +that they will have to synchronize with the central repository from time to time. Major commands @@ -33,7 +33,7 @@ hg clone ssh://myhost//home/src/repo -* See the contents of the local repository (graphical tool in Tk):: +* See the contents of the local repository (graphical tool in Qt):: hgview @@ -111,17 +111,15 @@ 3. `hg ci` 4. `hg push` -Installation of the forest extension -```````````````````````````````````` +Installation of the guestrepo extension +``````````````````````````````````````` -Set up the forest extension by getting a copy of the sources -from http://hg.akoha.org/hgforest/ and adding the following +Set up the guestrepo extension by getting a copy of the sources +from https://bitbucket.org/selinc/guestrepo and adding the following lines to your ``~/.hgrc``: :: [extensions] - hgext.forest= - # or, if forest.py is not in the hgext dir: - # forest=/path/to/forest.py + guestrepo=/path/to/guestrepo/guestrepo More information diff -r 84738d495ffd -r 793377697c81 doc/book/en/devrepo/profiling.rst --- a/doc/book/en/devrepo/profiling.rst Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/book/en/devrepo/profiling.rst Wed Sep 24 18:04:30 2014 +0200 @@ -10,7 +10,7 @@ queries. In your ``all-in-one.conf`` file, set the **query-log-file** option:: # web application query log file - query-log-file=~/myapp-rql.log + query-log-file=/home/user/myapp-rql.log Then restart your application, reload your page and stop your application. The file ``myapp-rql.log`` now contains the list of RQL queries that were @@ -28,7 +28,7 @@ .. sourcecode:: sh - $ cubicweb-ctl exlog ~/myapp-rql.log + $ cubicweb-ctl exlog /home/user/myapp-rql.log 0.07 50 Any A WHERE X eid %(x)s, X firstname A {} 0.05 50 Any A WHERE X eid %(x)s, X lastname A {} 0.01 1 Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, E employees X, X modification_date AA {} diff -r 84738d495ffd -r 793377697c81 doc/book/en/devrepo/repo/sessions.rst --- a/doc/book/en/devrepo/repo/sessions.rst Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/book/en/devrepo/repo/sessions.rst Wed Sep 24 18:04:30 2014 +0200 @@ -3,50 +3,47 @@ Sessions ======== -Sessions are object carrying the `.execute` method to query the data -sources. +Sessions are objects linked to an authenticated user. The `Session.new_cnx` +method returns a new Connection linked to that session. + +Connections +=========== -Kinds of sessions ------------------ +Connections provide the `.execute` method to query the data sources. -There are two kinds of sessions. 
+Kinds of connections +-------------------- -* `normal sessions` are the most common: they are related to users and +There are two kinds of connections. + +* `normal connections` are the most common: they are related to users and carry security checks coming with user credentials -* `internal sessions` have all the powers; they are also used in only a +* `internal connections` have all the powers; they are also used in only a few situations where you don't already have an adequate session at hand, like: user authentication, data synchronisation in multi-source contexts -.. note:: - Do not confuse the session type with their connection mode, for - instance : `in memory` or `pyro`. - -Normal sessions are typically named `_cw` in most appobjects or +Normal connections are typically named `_cw` in most appobjects or sometimes just `session`. -Internal sessions are available from the `Repository` object and are +Internal connections are available from the `Repository` object and are to be used like this: .. sourcecode:: python - session = self.repo.internal_session() - try: - do_stuff_with(session) - finally: - session.close() + with self.repo.internal_cnx() as cnx: + do_stuff_with(cnx) + cnx.commit() -.. warning:: - Do not forget to close such a session after use for a session leak - will quickly lead to an application crash. +Connections should always be used as context managers, to avoid leaks. Authentication and management of sessions ----------------------------------------- The authentication process is a ballet involving a few dancers: -* through its `connect` method the top-level application object (the +* through its `get_session` method the top-level application object (the `CubicWebPublisher`) will open a session whenever a web request comes in; it asks the `session manager` to open a session (giving the web request object as context) using `open_session` @@ -88,7 +85,7 @@ ------------------------------ Sometimes CubicWeb's out-of-the-box authentication schemes (cookie and -http) are not sufficient. Nowadays there is a plethore of such schemes +http) are not sufficient. Nowadays there is a plethora of such schemes and the framework cannot provide them all, but as the sequence above shows, it is extensible. @@ -154,7 +151,7 @@ .. sourcecode:: python - class XFooUserRetriever(authentication.LoginPasswordRetreiver): + class XFooUserRetriever(authentication.LoginPasswordRetriever): """ authenticate by the x-foo-user http header or just do normal login/password authentication """ @@ -200,7 +197,8 @@ return 1 return 0 -Full API Session -~~~~~~~~~~~~~~~~~~~~~~~~~~ +Full Session and Connection API +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. autoclass:: cubicweb.server.session.Session +.. autoclass:: cubicweb.server.session.Connection diff -r 84738d495ffd -r 793377697c81 doc/book/en/devrepo/testing.rst --- a/doc/book/en/devrepo/testing.rst Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/book/en/devrepo/testing.rst Wed Sep 24 18:04:30 2014 +0200 @@ -25,8 +25,8 @@ http://docs.python.org/library/sqlite3.html) as a backend. The database is stored in the mycube/test/tmpdb, -mycube/test/tmpdb-template files. If it does not (yet) exists, it will -be built automatically when the test suit starts. +mycube/test/tmpdb-template files. If it does not (yet) exist, it will +be built automatically when the test suite starts. .. warning:: @@ -34,8 +34,8 @@ one must delete these two files. 
Changes concerned only with entity or relation type properties (constraints, cardinalities, permissions) and generally dealt with using the - `sync_schema_props_perms()` fonction of the migration environment - need not a database regeneration step. + `sync_schema_props_perms()` function of the migration environment do + not need a database regeneration step. .. _hook_test: @@ -53,56 +53,58 @@ class ClassificationHooksTC(CubicWebTC): def setup_database(self): - req = self.request() - group_etype = req.find_one_entity('CWEType', name='CWGroup') - c1 = req.create_entity('Classification', name=u'classif1', - classifies=group_etype) - user_etype = req.find_one_entity('CWEType', name='CWUser') - c2 = req.create_entity('Classification', name=u'classif2', - classifies=user_etype) - self.kw1 = req.create_entity('Keyword', name=u'kwgroup', included_in=c1) - self.kw2 = req.create_entity('Keyword', name=u'kwuser', included_in=c2) + with self.admin_access.repo_cnx() as cnx: + group_etype = cnx.find('CWEType', name='CWGroup').one() + c1 = cnx.create_entity('Classification', name=u'classif1', + classifies=group_etype) + user_etype = cnx.find('CWEType', name='CWUser').one() + c2 = cnx.create_entity('Classification', name=u'classif2', + classifies=user_etype) + self.kw1eid = cnx.create_entity('Keyword', name=u'kwgroup', included_in=c1).eid + cnx.commit() def test_cannot_create_cycles(self): - # direct obvious cycle - self.assertRaises(ValidationError, self.kw1.cw_set, - subkeyword_of=self.kw1) - # testing indirect cycles - kw3 = self.execute('INSERT Keyword SK: SK name "kwgroup2", SK included_in C, ' - 'SK subkeyword_of K WHERE C name "classif1", K eid %s' - % self.kw1.eid).get_entity(0,0) - self.kw1.cw_set(subkeyword_of=kw3) - self.assertRaises(ValidationError, self.commit) + with self.admin_access.repo_cnx() as cnx: + kw1 = cnx.entity_from_eid(self.kw1eid) + # direct obvious cycle + with self.assertRaises(ValidationError): + kw1.cw_set(subkeyword_of=kw1) + cnx.rollback() + # testing indirect cycles + kw3 = cnx.execute('INSERT Keyword SK: SK name "kwgroup2", SK included_in C, ' + 'SK subkeyword_of K WHERE C name "classif1", K eid %(k)s' + {'k': kw1}).get_entity(0,0) + kw3.cw_set(reverse_subkeyword_of=kw1) + self.assertRaises(ValidationError, cnx.commit) The test class defines a :meth:`setup_database` method which populates the database with initial data. Each test of the class runs with this -pre-populated database. A commit is done automatically after the -:meth:`setup_database` call. You don't have to call it explicitely. +pre-populated database. -The test case itself checks that an Operation does it job of +The test case itself checks that an Operation does its job of preventing cycles amongst Keyword entities. -`create_entity` is a useful method, which easily allows to create an -entity. You can link this entity to others entities, by specifying as -argument, the relation name, and the entity to link, as value. In the -above example, the `Classification` entity is linked to a `CWEtype` -via the relation `classifies`. Conversely, if you are creating a -`CWEtype` entity, you can link it to a `Classification` entity, by -adding `reverse_classifies` as argument. +The `create_entity` method of connection (or request) objects allows +to create an entity. You can link this entity to other entities, by +specifying as argument, the relation name, and the entity to link, as +value. In the above example, the `Classification` entity is linked to +a `CWEtype` via the relation `classifies`. 
Conversely, if you are +creating a `CWEtype` entity, you can link it to a `Classification` +entity, by adding `reverse_classifies` as argument. .. note:: - :meth:`commit` method is not called automatically in test_XXX - methods. You have to call it explicitely if needed (notably to test - operations). It is a good practice to call :meth:`clear_all_caches` - on entities after a commit to avoid request cache effects. + the :meth:`commit` method is not called automatically. You have to + call it explicitly if needed (notably to test operations). It is a + good practice to regenerate entities with :meth:`entity_from_eid` + after a commit to avoid request cache effects. You can see an example of security tests in the :ref:`adv_tuto_security`. It is possible to have these tests run continuously using `apycot`_. -.. _apycot: http://www.logilab.org/project/apycot +.. _apycot: http://www.cubicweb.org/project/apycot .. _securitytest: @@ -113,66 +115,50 @@ support multiple connections at a time, you must be careful when simulating security, changing users. -By default, tests run with a user with admin privileges. This -user/connection must never be closed. +By default, tests run with a user with admin privileges. Connections +using these credentials are accessible through the `admin_access` object +of the test classes. -Before a self.login, one has to release the connection pool in use -with a self.commit, self.rollback or self.close. - -The `login` method returns a connection object that can be used as a +The `repo_cnx()` method returns a connection object that can be used as a context manager: .. sourcecode:: python - with self.login('user1') as user: - req = user.req - req.execute(...) - -On exit of the context manager, either a commit or rollback is issued, -which releases the connection. - -When one is logged in as a normal user and wants to switch back to the -admin user without committing, one has to use -self.restore_connection(). - -Usage with restore_connection: - -.. sourcecode:: python + # admin_access is a pre-cooked session wrapping object + # it is built with: + # self.admin_access = self.new_access('admin') + with self.admin_access.repo_cnx() as cnx: + cnx.execute(...) + self.create_user(cnx, login='user1') + cnx.commit() - # execute using default admin connection - self.execute(...) - # I want to login with another user, ensure to free admin connection pool - # (could have used rollback but not close here - # we should never close defaut admin connection) - self.commit() - cnx = self.login('user') - # execute using user connection - self.execute(...) - # I want to login with another user or with admin user - self.commit(); cnx.close() - # restore admin connection, never use cnx = self.login('admin'), it will return - # the default admin connection and one may be tempted to close it - self.restore_connection() + user1access = self.new_access('user1') + with user1access.web_request() as req: + req.execute(...) + req.cnx.commit() + +On exit of the context manager, a rollback is issued, which releases +the connection. Don't forget to issue the `cnx.commit()` calls! .. warning:: - Do not use the references kept to the entities created with a - connection from another ! + Do not use references kept to the entities created with a + connection from another one! 
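The sketch below shows one safe pattern: pass eids across connections and
refetch the entity with `entity_from_eid` inside the connection that actually
uses it. The `Ticket` entity type and its `title` attribute are hypothetical,
for illustration only.

.. sourcecode:: python

    def test_entity_not_shared_between_connections(self):
        with self.admin_access.repo_cnx() as cnx:
            # 'Ticket' is a hypothetical entity type
            eid = cnx.create_entity('Ticket', title=u'broken').eid
            cnx.commit()
        with self.admin_access.repo_cnx() as cnx2:
            # refetch through the connection that uses the entity, instead
            # of reusing the entity object created with the first connection
            ticket = cnx2.entity_from_eid(eid)
            self.assertEqual(u'broken', ticket.title)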
Email notifications tests ````````````````````````` -When running tests potentially generated e-mails are not really sent -but is found in the list `MAILBOX` of module +When running tests, potentially generated e-mails are not really sent +but are found in the list `MAILBOX` of module :mod:`cubicweb.devtools.testlib`. You can test your notifications by analyzing the contents of this list, which contains objects with two attributes: * `recipients`, the list of recipients -* `msg`, object email.Message +* `msg`, email.Message object -Let us look at simple example from the ``blog`` cube. +Let us look at a simple example from the ``blog`` cube. .. sourcecode:: python @@ -182,28 +168,28 @@ """test blog specific behaviours""" def test_notifications(self): - req = self.request() - cubicweb_blog = req.create_entity('Blog', title=u'cubicweb', - description=u'cubicweb is beautiful') - blog_entry_1 = req.create_entity('BlogEntry', title=u'hop', - content=u'cubicweb hop') - blog_entry_1.cw_set(entry_of=cubicweb_blog) - blog_entry_2 = req.create_entity('BlogEntry', title=u'yes', - content=u'cubicweb yes') - blog_entry_2.cw_set(entry_of=cubicweb_blog) - self.assertEqual(len(MAILBOX), 0) - self.commit() - self.assertEqual(len(MAILBOX), 2) - mail = MAILBOX[0] - self.assertEqual(mail.subject, '[data] hop') - mail = MAILBOX[1] - self.assertEqual(mail.subject, '[data] yes') + with self.admin_access.web_request() as req: + cubicweb_blog = req.create_entity('Blog', title=u'cubicweb', + description=u'cubicweb is beautiful') + blog_entry_1 = req.create_entity('BlogEntry', title=u'hop', + content=u'cubicweb hop') + blog_entry_1.cw_set(entry_of=cubicweb_blog) + blog_entry_2 = req.create_entity('BlogEntry', title=u'yes', + content=u'cubicweb yes') + blog_entry_2.cw_set(entry_of=cubicweb_blog) + self.assertEqual(len(MAILBOX), 0) + req.cnx.commit() + self.assertEqual(len(MAILBOX), 2) + mail = MAILBOX[0] + self.assertEqual(mail.subject, '[data] hop') + mail = MAILBOX[1] + self.assertEqual(mail.subject, '[data] yes') Visible actions tests ````````````````````` It is easy to write unit tests to test actions which are visible to -user or to a category of users. Let's take an example in the +a user or to a category of users. Let's take an example in the `conference cube`_. .. 
_`conference cube`: http://www.cubicweb.org/project/cubicweb-conference @@ -212,34 +198,35 @@ class ConferenceActionsTC(CubicWebTC): def setup_database(self): - self.conf = self.create_entity('Conference', - title=u'my conf', - url_id=u'conf', - start_on=date(2010, 1, 27), - end_on = date(2010, 1, 29), - call_open=True, - reverse_is_chair_at=chair, - reverse_is_reviewer_at=reviewer) + with self.admin_access.repo_cnx() as cnx: + self.confeid = cnx.create_entity('Conference', + title=u'my conf', + url_id=u'conf', + start_on=date(2010, 1, 27), + end_on = date(2010, 1, 29), + call_open=True, + reverse_is_chair_at=chair, + reverse_is_reviewer_at=reviewer).eid def test_admin(self): - req = self.request() - rset = req.find_entities('Conference') - self.assertListEqual(self.pactions(req, rset), - [('workflow', workflow.WorkflowActions), - ('edit', confactions.ModifyAction), - ('managepermission', actions.ManagePermissionsAction), - ('addrelated', actions.AddRelatedActions), - ('delete', actions.DeleteAction), - ('generate_badge_action', badges.GenerateBadgeAction), - ('addtalkinconf', confactions.AddTalkInConferenceAction) - ]) - self.assertListEqual(self.action_submenu(req, rset, 'addrelated'), - [(u'add Track in_conf Conference object', - u'http://testing.fr/cubicweb/add/Track' - u'?__linkto=in_conf%%3A%(conf)s%%3Asubject&' - u'__redirectpath=conference%%2Fconf&' - u'__redirectvid=' % {'conf': self.conf.eid}), - ]) + with self.admin_access.web_request() as req: + rset = req.find('Conference').one() + self.assertListEqual(self.pactions(req, rset), + [('workflow', workflow.WorkflowActions), + ('edit', confactions.ModifyAction), + ('managepermission', actions.ManagePermissionsAction), + ('addrelated', actions.AddRelatedActions), + ('delete', actions.DeleteAction), + ('generate_badge_action', badges.GenerateBadgeAction), + ('addtalkinconf', confactions.AddTalkInConferenceAction) + ]) + self.assertListEqual(self.action_submenu(req, rset, 'addrelated'), + [(u'add Track in_conf Conference object', + u'http://testing.fr/cubicweb/add/Track' + u'?__linkto=in_conf%%3A%(conf)s%%3Asubject&' + u'__redirectpath=conference%%2Fconf&' + u'__redirectvid=' % {'conf': self.confeid}), + ]) You just have to execute a rql query corresponding to the view you want to test, and to compare the result of @@ -247,7 +234,7 @@ that must be visible in the interface. This is a list of tuples. The first element is the action's `__regid__`, the second the action's class. -To test actions in submenu, you just have to test the result of +To test actions in a submenu, you just have to test the result of :meth:`~cubicweb.devtools.testlib.CubicWebTC.action_submenu` method. The last parameter of the method is the action's category. The result is a list of tuples. The first element is the action's title, and the second element the @@ -290,23 +277,27 @@ Cache heavy database setup ------------------------------- -Some tests suite require a complex setup of the database that takes seconds (or -event minutes) to complete. Doing the whole setup for all individual tests make -the whole run very slow. The ``CubicWebTC`` class offer a simple way to prepare -specific database once for multiple tests. The `test_db_id` class attribute of -your ``CubicWebTC`` must be set a unique identifier and the -:meth:`pre_setup_database` class method build the cached content. As the -:meth:`pre_setup_database` method is not grantee to be called, you must not set -any class attribut to be used during test there. 
Databases for each `test_db_id` -are automatically created if not already in cache. Clearing the cache is up to -the user. Cache files are found in the :file:`data/database` subdirectory of your -test directory. +Some test suites require a complex setup of the database that takes +seconds (or even minutes) to complete. Doing the whole setup for each +individual test makes the whole run very slow. The ``CubicWebTC`` +class offers a simple way to prepare a specific database once for +multiple tests. The `test_db_id` class attribute of your +``CubicWebTC`` subclass must be set to a unique identifier and the +:meth:`pre_setup_database` class method must build the cached content. As +the :meth:`pre_setup_database` method is not guaranteed to be called +every time a test method is run, you must not set any class attribute +to be used during tests *there*. Databases for each `test_db_id` are +automatically created if not already in cache. Clearing the cache is +up to the user. Cache files are found in the :file:`data/database` +subdirectory of your test directory. .. warning:: - Take care to always have the same :meth:`pre_setup_database` function for all - call with a given `test_db_id` otherwise you test will have unpredictable - result given the first encountered one. + Take care to always have the same :meth:`pre_setup_database` + function for all classes with a given `test_db_id`, otherwise your + tests will have unpredictable results depending on the first + encountered one. + Testing on a real-life database ------------------------------- @@ -332,10 +323,10 @@ sourcefile='/path/to/realdb_sources') def test_blog_rss(self): - req = self.request() + with self.admin_access.web_request() as req: rset = req.execute('Any B ORDERBY D DESC WHERE B is BlogEntry, ' - 'B created_by U, U login "logilab", B creation_date D') - self.view('rss', rset) + 'B created_by U, U login "logilab", B creation_date D') + self.view('rss', rset, req=req) Testing with other cubes ------------------------ @@ -351,7 +342,7 @@ The format is: * possibly several empty lines or lines starting with ``#`` (comment lines) -* one line containing a coma separated list of cube names. +* one line containing a comma-separated list of cube names. It is also possible to add a ``schema.py`` file in ``mycube/test/data``, which will be used by the testing framework, @@ -362,12 +353,12 @@ -------------------- CubicWeb provides some literate programming capabilities. The :ref:`cubicweb-ctl` -`shell` command accepts differents format files. If your file ends with `.txt` -or `.rst`, the file will be parsed by :mod:`doctest.testfile` with CubicWeb +`shell` command accepts different file formats. If your file ends with `.txt` +or `.rst`, the file will be parsed by :mod:`doctest.testfile` with CubicWeb's :ref:`migration` API enabled in it. -Create a `scenario.txt` file into `test/` directory and fill with some content. -Please refer the :mod:`doctest.testfile` `documentation`_. +Create a `scenario.txt` file in the `test/` directory and fill it with some content. +Refer to the :mod:`doctest.testfile` `documentation`_. .. _documentation: http://docs.python.org/library/doctest.html @@ -404,7 +395,7 @@ Passing parameters `````````````````` -Using extra arguments to parametrize your scenario is possible by prepend them +Using extra arguments to parametrize your scenario is possible by prepending them by double dashes. Please refer to the `cubicweb-ctl shell --help` usage.
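Coming back to the scenario files described above, here is a sketch of what a (hypothetical) `scenario.txt` could contain. `BlogEntry` and the expected counts are assumptions for the example; `rql`, `create_entity` and `commit` are commands of the migration API enabled in such scripts:

.. sourcecode:: python

    >>> rql('Any B WHERE B is BlogEntry').rowcount
    0
    >>> entry = create_entity('BlogEntry', title=u'hop', content=u'hop hop')
    >>> commit()
    >>> rql('Any B WHERE B is BlogEntry').rowcount
    1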
@@ -431,7 +422,7 @@ discover them automatically) * launch `pytest unittest_foo.py` to execute one test file * launch `pytest unittest_foo.py bar` to execute all test methods and - all test cases whose name contain `bar` + all test cases whose name contains `bar` Additionally, the `-x` option tells pytest to exit at the first error or failure. The `-i` option tells pytest to drop into pdb whenever an @@ -460,42 +451,43 @@ What you need to know about request and session ----------------------------------------------- - .. image:: ../images/request_session.png First, remember to think that some code run on a client side, some other on the repository side. More precisely: -* client side: web interface, raw db-api connection (cubicweb-ctl shell for +* client side: web interface, raw repoapi connection (cubicweb-ctl shell for instance); * repository side: RQL query execution, that may trigger hooks and operation. -The client interact with the repository through a db-api connection. +The client interacts with the repository through a repoapi connection. -A db-api connection is tied to a session in the repository. The connection and -request objects are unaccessible from repository code / the session object is -unaccessible from client code (theorically at least). +.. note:: + + These distinctions are going to disappear in cubicweb 3.21 (if not + before). -The :mod:`cubicweb.dbapi` module provides a base request class. The web interface -provides an extended request class. +A repoapi connection is tied to a session in the repository. The connection and +request objects are inaccessible from repository code / the session object is +inaccessible from client code (theoretically at least). - -The `request` object provides access to all cubicweb resources, eg: +The web interface provides a request class. That `request` object provides +access to all cubicweb resources, eg: * the registry (which itself provides access to the schema and the configuration); -* an underlying db-api connection (when using req.execute, you actually call the - db-api); +* an underlying repoapi connection (when using req.execute, you actually call the + repoapi); * other specific resources depending on the client type (url generation according to base url, form parameters, etc.). A `session` provides an api similar to a request regarding RQL execution and -access to global resources (registry and all), but also have the following +access to global resources (registry and all), but also has the following responsibilities: * handle transaction data, that will live during the time of a single @@ -510,72 +502,58 @@ The `_cw` attribute ``````````````````` The `_cw` attribute available on every application object provides access to all -cubicweb resources, eg: +cubicweb resources, i.e.: -For code running on the client side (eg web interface view), `_cw` is a request -instance. +- For code running on the client side (eg web interface view), `_cw` is a request + instance. -For code running on the repository side (hooks and operation), `_cw` is a session -instance. +- For code running on the repository side (hooks and operation), `_cw` is a + Connection or Session instance. -Beware some views may be called with a session (eg notifications) or with a -DB-API request. In the later case, see :meth:`use_web_compatible_requests` on -:class:`Connection` instances. +Beware some views may be called with a session (e.g. notifications) or with a +request. 
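To make the distinction concrete, here is a minimal sketch of the two cases; the class names, registry ids and selected event are invented for the example:

.. sourcecode:: python

    from cubicweb.view import View
    from cubicweb.server.hook import Hook

    class HelloView(View):
        """client side: here ``self._cw`` is a (web) request"""
        __regid__ = 'hello'

        def call(self):
            self.w(u'base url: %s' % self._cw.base_url())

    class EntityAddedHook(Hook):
        """repository side: here ``self._cw`` is a connection/session"""
        __regid__ = 'myapp.entity-added'
        events = ('after_add_entity',)

        def __call__(self):
            # RQL issued directly on the repository side
            self._cw.execute('Any X WHERE X eid %(x)s',
                             {'x': self.entity.eid})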
Request, session and transaction ```````````````````````````````` -In the web interface, an HTTP request is handle by a single request, which will -be thrown way once the response send. +In the web interface, an HTTP request is handled by a single request object, +which will be thrown away once the response is sent. -The web publisher handle the transaction: +The web publisher handles the transaction: * commit / rollback is done automatically -* you should not commit / rollback explicitly -When using a raw db-api, you're on your own regarding transaction. - -On the other hand, db-api connection and session live from a user login to its logout. - -Because session lives for a long time, and database connections is a limited -resource, we can't bound a session to its own database connection for all its -lifetime. The repository handles a pool of connections (4 by default), and it's -responsible to attribute them as needed. +* you should not commit / rollback explicitly, unless you really + need to Let's detail the process: -1. an incoming RQL query comes from a client to the repository +1. an incoming RQL query comes from a client to the web stack -2. the repository attributes a database connection to the session +2. the web stack opens an authenticated database connection for the + request, which is associated with a user session -3. the repository's querier execute the query +3. the query is executed (through the repository connection) -4. this query may trigger hooks. Hooks and operation may execute some rql queries - through `_cw.execute`. Those queries go directly to the querier, hence don't - touch the database connection, they use the one attributed in 2. +4. this query may trigger hooks. Hooks and operations may execute some rql queries + through `cnx.execute`. -5. the repository's get the result of the query in 1. If it was a RQL read query, +5. the repository gets the result of the query in 1. If it was a RQL read query, the database connection is released. If it was a write query, the connection - is then tied to the session until the transaction is commited or rollbacked. + is then tied to the session until the transaction is committed or rolled back. 6. results are sent back to the client This implies several things: -* when using a request, or code executed in hooks, this database connection - handling is totally transparent +* when using a request, or code executed in hooks, this database + connection handling is totally transparent -* however, take care when writing test: you are usually faking / testing both the - server and the client side, so you have to decide when to use self.request() / - self.session. Ask yourself "where the code I want to test will be running, - client or repository side ?". The response is usually : use a request :) - However, if you really need using a session: - - - commit / rollback will free the database connection (unless explicitly told - not to do so). - - - if you issue a query after that without asking for a database connection - (`session.get_cnxset()`), you will end up with a 'None type has no attribute - source()' error +* however, take care when writing tests: you are usually faking / + testing both the server and the client side, so you have to decide + when to use RepoAccess.client_cnx or RepoAccess.repo_cnx. Ask + yourself "where will the code I want to test be running, client or + repository side?". The answer is usually: use a repo connection + (since the "client connection" concept is going away in a couple of + releases).
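For instance, a test can use both kinds of access side by side (a sketch; the entity type and view id are just examples):

.. sourcecode:: python

    from cubicweb.devtools.testlib import CubicWebTC

    class RunningSideTC(CubicWebTC):

        def test_repository_side(self):
            # repo_cnx: a plain repository connection, the natural
            # choice for exercising hooks and operations
            with self.admin_access.repo_cnx() as cnx:
                cnx.create_entity('CWGroup', name=u'testers')
                cnx.commit()

        def test_client_side(self):
            # web_request: simulates the web (client) side, for
            # testing views and other client code
            with self.admin_access.web_request() as req:
                rset = req.execute('Any G WHERE G is CWGroup')
                self.view('list', rset, req=req)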
diff -r 84738d495ffd -r 793377697c81 doc/book/en/devweb/js.rst --- a/doc/book/en/devweb/js.rst Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/book/en/devweb/js.rst Wed Sep 24 18:04:30 2014 +0200 @@ -317,67 +317,6 @@ } -python/ajax dynamic callbacks -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -CubicWeb provides a way to dynamically register a function and make it -callable from the javascript side. The typical use case for this is a -situation where you have everything at hand to implement an action -(whether it be performing a RQL query or executing a few python -statements) that you'd like to defer to a user click in the web -interface. In other words, generate an HTML `` [the remainder of this removed section and the header of the next file's diff were lost in extraction] [hunks for the request/session diagram's SVG source: Inkscape window metadata is updated and the "DB API" text label is renamed to "REPOAPI"] -file named file:`_Any.py` ('Any' being there mostly for historical reason). +file named file:`_Any.py` ('Any' being there mostly for historical reasons). Here I'll create a *migration/0.2.0_Any.py* file containing the following instructions: @@ -423,11 +422,11 @@ add_relation_type('visibility') sync_schema_props_perms() -Then I update the version number in cube's *__pkginfo__.py* to 0.2.0. And +Then I update the version number in the cube's *__pkginfo__.py* to 0.2.0. And that's it! Those instructions will: * update the instance's schema by adding our two new relations and update the - underlying database tables accordingly (the two first instructions) + underlying database tables accordingly (the first two instructions) * update schema's permissions definition (the last instruction) diff -r 84738d495ffd -r 793377697c81 doc/book/en/tutorials/base/customizing-the-application.rst --- a/doc/book/en/tutorials/base/customizing-the-application.rst Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/book/en/tutorials/base/customizing-the-application.rst Wed Sep 24 18:04:30 2014 +0200 @@ -26,7 +26,7 @@ cubicweb-ctl newcube myblog This will create in the cubes directory (:file:`/path/to/grshell/cubes` for source installation, :file:`/usr/share/cubicweb/cubes` for Debian packages installation) a directory named :file:`blog` reflecting the structure described in :ref:`cubelayout`.
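Coming back to the migration commands used above, a later (entirely hypothetical) upgrade script, say *migration/0.3.0_Any.py*, could combine a few more commands of the same API:

.. sourcecode:: python

    # pick up an attribute newly added to schema.py (the names are invented)
    add_attribute('BlogEntry', 'summary')
    # resynchronize properties/permissions of a single relation type
    sync_schema_props_perms('visibility')
    commit()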
diff -r 84738d495ffd -r 793377697c81 doc/tools/pyjsrest.py --- a/doc/tools/pyjsrest.py Wed Sep 24 17:35:59 2014 +0200 +++ b/doc/tools/pyjsrest.py Wed Sep 24 18:04:30 2014 +0200 @@ -142,7 +142,6 @@ FILES_TO_IGNORE = set([ 'jquery.js', 'jquery.treeview.js', - 'jquery.json.js', 'jquery.tablesorter.js', 'jquery.timePicker.js', 'jquery.flot.js', diff -r 84738d495ffd -r 793377697c81 entities/adapters.py --- a/entities/adapters.py Wed Sep 24 17:35:59 2014 +0200 +++ b/entities/adapters.py Wed Sep 24 18:04:30 2014 +0200 @@ -367,15 +367,3 @@ globalmsg = _('some relations violate a unicity constraint') rtypes_msg['unicity constraint'] = globalmsg raise ValidationError(self.entity.eid, rtypes_msg) - -# deprecated ################################################################### - - -class adapter_deprecated(view.auto_unwrap_bw_compat): - """metaclass to print a warning on instantiation of a deprecated class""" - - def __call__(cls, *args, **kwargs): - msg = getattr(cls, "__deprecation_warning__", - "%(cls)s is deprecated") % {'cls': cls.__name__} - warn(msg, DeprecationWarning, stacklevel=2) - return type.__call__(cls, *args, **kwargs) diff -r 84738d495ffd -r 793377697c81 entities/authobjs.py --- a/entities/authobjs.py Wed Sep 24 17:35:59 2014 +0200 +++ b/entities/authobjs.py Wed Sep 24 18:04:30 2014 +0200 @@ -166,6 +166,17 @@ dc_long_title = name + def __call__(self, *args, **kwargs): + """ugly hack for compatibility between dbapi and repo api In the dbapi, Connection and Session have a ``user`` method to + generate a user for a request. In the repo api, Connection and Session + have a user attribute inherited from SessionRequestBase prototype. This + ugly hack allows us not to break users of the ``user`` method. + XXX Deprecate me ASAP""" + return self + from logilab.common.deprecation import class_renamed EUser = class_renamed('EUser', CWUser) EGroup = class_renamed('EGroup', CWGroup) diff -r 84738d495ffd -r 793377697c81 entities/lib.py --- a/entities/lib.py Wed Sep 24 17:35:59 2014 +0200 +++ b/entities/lib.py Wed Sep 24 18:04:30 2014 +0200 @@ -18,6 +18,7 @@ """entity classes for optional library entities""" __docformat__ = "restructuredtext en" +from warnings import warn from urlparse import urlsplit, urlunsplit from datetime import datetime @@ -130,6 +131,13 @@ __regid__ = 'CWCache' fetch_attrs, cw_fetch_order = fetch_config(['name']) + def __init__(self, *args, **kwargs): + warn('[3.19] CWCache entity type is going away soon. ' + 'Other caching mechanisms can be used more reliably ' + 'to the same effect.', + DeprecationWarning) + super(CWCache, self).__init__(*args, **kwargs) + def touch(self): self._cw.execute('SET X timestamp %(t)s WHERE X eid %(x)s', {'t': datetime.now(), 'x': self.eid}) diff -r 84738d495ffd -r 793377697c81 entities/test/data/schema.py --- a/entities/test/data/schema.py Wed Sep 24 17:35:59 2014 +0200 +++ b/entities/test/data/schema.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -15,11 +15,9 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""entities tests schema +"""entities tests schema""" -""" - -from yams.buildobjs import EntityType, String +from yams.buildobjs import EntityType, String, RichString from cubicweb.schema import make_workflowable class Company(EntityType): diff -r 84738d495ffd -r 793377697c81 entities/test/unittest_base.py --- a/entities/test/unittest_base.py Wed Sep 24 17:35:59 2014 +0200 +++ b/entities/test/unittest_base.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -21,7 +21,6 @@ from logilab.common.testlib import unittest_main from logilab.common.decorators import clear_cache -from logilab.common.interface import implements from cubicweb.devtools.testlib import CubicWebTC @@ -31,114 +30,135 @@ class BaseEntityTC(CubicWebTC): def setup_database(self): - req = self.request() - self.member = self.create_user(req, 'member') - + with self.admin_access.repo_cnx() as cnx: + self.membereid = self.create_user(cnx, 'member').eid + cnx.commit() class MetadataTC(BaseEntityTC): def test_creator(self): - self.login(u'member') - entity = self.request().create_entity('Bookmark', title=u"hello", path=u'project/cubicweb') - self.commit() - self.assertEqual(entity.creator.eid, self.member.eid) - self.assertEqual(entity.dc_creator(), u'member') + with self.new_access('member').repo_cnx() as cnx: + entity = cnx.create_entity('Bookmark', title=u"hello", path=u'project/cubicweb') + cnx.commit() + self.assertEqual(entity.creator.eid, self.membereid) + self.assertEqual(entity.dc_creator(), u'member') def test_type(self): #dc_type may be translated - self.assertEqual(self.member.dc_type(), 'CWUser') + with self.admin_access.client_cnx() as cnx: + member = cnx.entity_from_eid(self.membereid) + self.assertEqual(member.dc_type(), 'CWUser') def test_cw_etype(self): #cw_etype is never translated - self.assertEqual(self.member.cw_etype, 'CWUser') + with self.admin_access.client_cnx() as cnx: + member = cnx.entity_from_eid(self.membereid) + self.assertEqual(member.cw_etype, 'CWUser') def test_entity_meta_attributes(self): # XXX move to yams self.assertEqual(self.schema['CWUser'].meta_attributes(), {}) - self.assertEqual(dict((str(k), v) for k, v in self.schema['State'].meta_attributes().iteritems()), + self.assertEqual(dict((str(k), v) + for k, v in self.schema['State'].meta_attributes().iteritems()), {'description_format': ('format', 'description')}) def test_fti_rql_method(self): - eclass = self.vreg['etypes'].etype_class('EmailAddress') - self.assertEqual(['Any X, ALIAS, ADDRESS WHERE X is EmailAddress, ' - 'X alias ALIAS, X address ADDRESS'], - eclass.cw_fti_index_rql_queries(self.request())) + with self.admin_access.web_request() as req: + eclass = self.vreg['etypes'].etype_class('EmailAddress') + self.assertEqual(['Any X, ALIAS, ADDRESS WHERE X is EmailAddress, ' + 'X alias ALIAS, X address ADDRESS'], + eclass.cw_fti_index_rql_queries(req)) class EmailAddressTC(BaseEntityTC): + def test_canonical_form(self): - email1 = self.execute('INSERT EmailAddress X: X address "maarten.ter.huurne@philips.com"').get_entity(0, 0) - email2 = self.execute('INSERT EmailAddress X: X address "maarten@philips.com"').get_entity(0, 0) - email3 = self.execute('INSERT EmailAddress X: X address "toto@logilab.fr"').get_entity(0, 0) - email1.cw_set(prefered_form=email2) - 
self.assertEqual(email1.prefered.eid, email2.eid) - self.assertEqual(email2.prefered.eid, email2.eid) - self.assertEqual(email3.prefered.eid, email3.eid) + with self.admin_access.repo_cnx() as cnx: + email1 = cnx.execute('INSERT EmailAddress X: ' + 'X address "maarten.ter.huurne@philips.com"').get_entity(0, 0) + email2 = cnx.execute('INSERT EmailAddress X: ' + 'X address "maarten@philips.com"').get_entity(0, 0) + email3 = cnx.execute('INSERT EmailAddress X: ' + 'X address "toto@logilab.fr"').get_entity(0, 0) + email1.cw_set(prefered_form=email2) + self.assertEqual(email1.prefered.eid, email2.eid) + self.assertEqual(email2.prefered.eid, email2.eid) + self.assertEqual(email3.prefered.eid, email3.eid) def test_mangling(self): - email = self.execute('INSERT EmailAddress X: X address "maarten.ter.huurne@philips.com"').get_entity(0, 0) - self.assertEqual(email.display_address(), 'maarten.ter.huurne@philips.com') - self.assertEqual(email.printable_value('address'), 'maarten.ter.huurne@philips.com') - self.vreg.config.global_set_option('mangle-emails', True) - try: - self.assertEqual(email.display_address(), 'maarten.ter.huurne at philips dot com') - self.assertEqual(email.printable_value('address'), 'maarten.ter.huurne at philips dot com') - email = self.execute('INSERT EmailAddress X: X address "syt"').get_entity(0, 0) - self.assertEqual(email.display_address(), 'syt') - self.assertEqual(email.printable_value('address'), 'syt') - finally: - self.vreg.config.global_set_option('mangle-emails', False) + with self.admin_access.repo_cnx() as cnx: + email = cnx.execute('INSERT EmailAddress X: X address "maarten.ter.huurne@philips.com"').get_entity(0, 0) + self.assertEqual(email.display_address(), 'maarten.ter.huurne@philips.com') + self.assertEqual(email.printable_value('address'), 'maarten.ter.huurne@philips.com') + self.vreg.config.global_set_option('mangle-emails', True) + try: + self.assertEqual(email.display_address(), 'maarten.ter.huurne at philips dot com') + self.assertEqual(email.printable_value('address'), 'maarten.ter.huurne at philips dot com') + email = cnx.execute('INSERT EmailAddress X: X address "syt"').get_entity(0, 0) + self.assertEqual(email.display_address(), 'syt') + self.assertEqual(email.printable_value('address'), 'syt') + finally: + self.vreg.config.global_set_option('mangle-emails', False) def test_printable_value_escape(self): - email = self.execute('INSERT EmailAddress X: X address "maarten&ter@philips.com"').get_entity(0, 0) - self.assertEqual(email.printable_value('address'), 'maarten&ter@philips.com') - self.assertEqual(email.printable_value('address', format='text/plain'), 'maarten&ter@philips.com') + with self.admin_access.repo_cnx() as cnx: + email = cnx.execute('INSERT EmailAddress X: ' + 'X address "maarten&ter@philips.com"').get_entity(0, 0) + self.assertEqual(email.printable_value('address'), + 'maarten&ter@philips.com') + self.assertEqual(email.printable_value('address', format='text/plain'), + 'maarten&ter@philips.com') class CWUserTC(BaseEntityTC): def test_complete(self): - e = self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) - e.complete() + with self.admin_access.repo_cnx() as cnx: + e = cnx.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) + e.complete() def test_matching_groups(self): - e = self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) - self.assertTrue(e.matching_groups('managers')) - self.assertFalse(e.matching_groups('xyz')) - self.assertTrue(e.matching_groups(('xyz', 'managers'))) - 
self.assertFalse(e.matching_groups(('xyz', 'abcd'))) + with self.admin_access.repo_cnx() as cnx: + e = cnx.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) + self.assertTrue(e.matching_groups('managers')) + self.assertFalse(e.matching_groups('xyz')) + self.assertTrue(e.matching_groups(('xyz', 'managers'))) + self.assertFalse(e.matching_groups(('xyz', 'abcd'))) def test_dc_title_and_name(self): - e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0) - self.assertEqual(e.dc_title(), 'member') - self.assertEqual(e.name(), 'member') - e.cw_set(firstname=u'bouah') - self.assertEqual(e.dc_title(), 'member') - self.assertEqual(e.name(), u'bouah') - e.cw_set(surname=u'lôt') - self.assertEqual(e.dc_title(), 'member') - self.assertEqual(e.name(), u'bouah lôt') + with self.admin_access.repo_cnx() as cnx: + e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0) + self.assertEqual(e.dc_title(), 'member') + self.assertEqual(e.name(), 'member') + e.cw_set(firstname=u'bouah') + self.assertEqual(e.dc_title(), 'member') + self.assertEqual(e.name(), u'bouah') + e.cw_set(surname=u'lôt') + self.assertEqual(e.dc_title(), 'member') + self.assertEqual(e.name(), u'bouah lôt') def test_allowed_massmail_keys(self): - e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0) - # Bytes/Password attributes should be omited - self.assertEqual(e.cw_adapt_to('IEmailable').allowed_massmail_keys(), - set(('surname', 'firstname', 'login', 'last_login_time', - 'creation_date', 'modification_date', 'cwuri', 'eid')) - ) + with self.admin_access.repo_cnx() as cnx: + e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0) + # Bytes/Password attributes should be omited + self.assertEqual(e.cw_adapt_to('IEmailable').allowed_massmail_keys(), + set(('surname', 'firstname', 'login', 'last_login_time', + 'creation_date', 'modification_date', 'cwuri', 'eid')) + ) def test_cw_instantiate_object_relation(self): """ a weird non regression test """ - e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0) - self.request().create_entity('CWGroup', name=u'logilab', reverse_in_group=e) + with self.admin_access.repo_cnx() as cnx: + e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0) + cnx.create_entity('CWGroup', name=u'logilab', reverse_in_group=e) class HTMLtransformTC(BaseEntityTC): def test_sanitized_html(self): - r = self.request() - c = r.create_entity('Company', name=u'Babar', - description=u""" + with self.admin_access.repo_cnx() as cnx: + c = cnx.create_entity('Company', name=u'Babar', + description=u""" Title ===== @@ -148,10 +168,12 @@ """, description_format=u'text/rest') - self.commit() - c.cw_clear_all_caches() - self.assertIn('alert', c.printable_value('description', format='text/plain')) - self.assertNotIn('alert', c.printable_value('description', format='text/html')) + cnx.commit() + c.cw_clear_all_caches() + self.assertIn('alert', + c.printable_value('description', format='text/plain')) + self.assertNotIn('alert', + c.printable_value('description', format='text/html')) class SpecializedEntityClassesTC(CubicWebTC): diff -r 84738d495ffd -r 793377697c81 entities/test/unittest_wfobjs.py --- a/entities/test/unittest_wfobjs.py Wed Sep 24 17:35:59 2014 +0200 +++ b/entities/test/unittest_wfobjs.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -19,12 +19,11 @@ from cubicweb import ValidationError from cubicweb.devtools.testlib import CubicWebTC - -def add_wf(self, etype, name=None, default=False): +def add_wf(shell, etype, name=None, default=False): if name is None: name = etype - return self.shell().add_workflow(name, etype, default=default, - ensure_workflowable=False) + return shell.add_workflow(name, etype, default=default, + ensure_workflowable=False) def parse_hist(wfhist): return [(ti.previous_state.name, ti.new_state.name, @@ -35,101 +34,104 @@ class WorkflowBuildingTC(CubicWebTC): def test_wf_construction(self): - wf = add_wf(self, 'Company') - foo = wf.add_state(u'foo', initial=True) - bar = wf.add_state(u'bar') - self.assertEqual(wf.state_by_name('bar').eid, bar.eid) - self.assertEqual(wf.state_by_name('barrr'), None) - baz = wf.add_transition(u'baz', (foo,), bar, ('managers',)) - self.assertEqual(wf.transition_by_name('baz').eid, baz.eid) - self.assertEqual(len(baz.require_group), 1) - self.assertEqual(baz.require_group[0].name, 'managers') + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'Company') + foo = wf.add_state(u'foo', initial=True) + bar = wf.add_state(u'bar') + self.assertEqual(wf.state_by_name('bar').eid, bar.eid) + self.assertEqual(wf.state_by_name('barrr'), None) + baz = wf.add_transition(u'baz', (foo,), bar, ('managers',)) + self.assertEqual(wf.transition_by_name('baz').eid, baz.eid) + self.assertEqual(len(baz.require_group), 1) + self.assertEqual(baz.require_group[0].name, 'managers') def test_duplicated_state(self): - wf = add_wf(self, 'Company') - wf.add_state(u'foo', initial=True) - self.commit() - wf.add_state(u'foo') - with self.assertRaises(ValidationError) as cm: - self.commit() - self.assertEqual({'name-subject': 'workflow already has a state of that name'}, - cm.exception.errors) - # no pb if not in the same workflow - wf2 = add_wf(self, 'Company') - foo = wf2.add_state(u'foo', initial=True) - self.commit() - # gnark gnark - bar = wf.add_state(u'bar') - self.commit() - bar.cw_set(name=u'foo') - with self.assertRaises(ValidationError) as cm: - self.commit() - self.assertEqual({'name-subject': 'workflow already has a state of that name'}, - cm.exception.errors) + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'Company') + wf.add_state(u'foo', initial=True) + shell.commit() + wf.add_state(u'foo') + with self.assertRaises(ValidationError) as cm: + shell.commit() + self.assertEqual({'name-subject': 'workflow already has a state of that name'}, + cm.exception.errors) + # no pb if not in the same workflow + wf2 = add_wf(shell, 'Company') + foo = wf2.add_state(u'foo', initial=True) + shell.commit() + # gnark gnark + bar = wf.add_state(u'bar') + shell.commit() + bar.cw_set(name=u'foo') + with self.assertRaises(ValidationError) as cm: + shell.commit() + self.assertEqual({'name-subject': 'workflow already has a state of that name'}, + cm.exception.errors) def test_duplicated_transition(self): - wf = add_wf(self, 'Company') - foo = wf.add_state(u'foo', initial=True) - bar = wf.add_state(u'bar') - wf.add_transition(u'baz', (foo,), bar, ('managers',)) - wf.add_transition(u'baz', (bar,), foo) - with self.assertRaises(ValidationError) as cm: - self.commit() - self.assertEqual(cm.exception.errors, {'name-subject': 'workflow already has a transition of that name'}) - # no pb if not in the same workflow - wf2 = add_wf(self, 'Company') - foo = wf.add_state(u'foo', initial=True) - bar 
= wf.add_state(u'bar') - wf.add_transition(u'baz', (foo,), bar, ('managers',)) - self.commit() - # gnark gnark - biz = wf.add_transition(u'biz', (bar,), foo) - self.commit() - biz.cw_set(name=u'baz') - with self.assertRaises(ValidationError) as cm: - self.commit() - self.assertEqual(cm.exception.errors, {'name-subject': 'workflow already has a transition of that name'}) + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'Company') + foo = wf.add_state(u'foo', initial=True) + bar = wf.add_state(u'bar') + wf.add_transition(u'baz', (foo,), bar, ('managers',)) + wf.add_transition(u'baz', (bar,), foo) + with self.assertRaises(ValidationError) as cm: + shell.commit() + self.assertEqual(cm.exception.errors, {'name-subject': 'workflow already has a transition of that name'}) + # no pb if not in the same workflow + wf2 = add_wf(shell, 'Company') + foo = wf.add_state(u'foo', initial=True) + bar = wf.add_state(u'bar') + wf.add_transition(u'baz', (foo,), bar, ('managers',)) + shell.commit() + # gnark gnark + biz = wf.add_transition(u'biz', (bar,), foo) + shell.commit() + biz.cw_set(name=u'baz') + with self.assertRaises(ValidationError) as cm: + shell.commit() + self.assertEqual(cm.exception.errors, {'name-subject': 'workflow already has a transition of that name'}) class WorkflowTC(CubicWebTC): def setup_database(self): - req = self.request() rschema = self.schema['in_state'] for rdef in rschema.rdefs.itervalues(): self.assertEqual(rdef.cardinality, '1*') - self.member = self.create_user(req, 'member') + with self.admin_access.client_cnx() as cnx: + self.member_eid = self.create_user(cnx, 'member').eid + cnx.commit() def test_workflow_base(self): - req = self.request() - e = self.create_user(req, 'toto') - iworkflowable = e.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.state, 'activated') - iworkflowable.change_state('deactivated', u'deactivate 1') - self.commit() - iworkflowable.change_state('activated', u'activate 1') - self.commit() - iworkflowable.change_state('deactivated', u'deactivate 2') - self.commit() - e.cw_clear_relation_cache('wf_info_for', 'object') - self.assertEqual([tr.comment for tr in e.reverse_wf_info_for], - ['deactivate 1', 'activate 1', 'deactivate 2']) - self.assertEqual(iworkflowable.latest_trinfo().comment, 'deactivate 2') + with self.admin_access.web_request() as req: + e = self.create_user(req, 'toto') + iworkflowable = e.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'activated') + iworkflowable.change_state('deactivated', u'deactivate 1') + req.cnx.commit() + iworkflowable.change_state('activated', u'activate 1') + req.cnx.commit() + iworkflowable.change_state('deactivated', u'deactivate 2') + req.cnx.commit() + e.cw_clear_relation_cache('wf_info_for', 'object') + self.assertEqual([tr.comment for tr in e.reverse_wf_info_for], + ['deactivate 1', 'activate 1', 'deactivate 2']) + self.assertEqual(iworkflowable.latest_trinfo().comment, 'deactivate 2') def test_possible_transitions(self): - user = self.execute('CWUser X').get_entity(0, 0) - iworkflowable = user.cw_adapt_to('IWorkflowable') - trs = list(iworkflowable.possible_transitions()) - self.assertEqual(len(trs), 1) - self.assertEqual(trs[0].name, u'deactivate') - self.assertEqual(trs[0].destination(None).name, u'deactivated') + with self.admin_access.web_request() as req: + user = req.execute('CWUser X').get_entity(0, 0) + iworkflowable = user.cw_adapt_to('IWorkflowable') + trs = list(iworkflowable.possible_transitions()) + self.assertEqual(len(trs), 1) + 
self.assertEqual(trs[0].name, u'deactivate') + self.assertEqual(trs[0].destination(None).name, u'deactivated') # test a std user get no possible transition - cnx = self.login('member') - req = self.request() - # fetch the entity using the new session - trs = list(req.user.cw_adapt_to('IWorkflowable').possible_transitions()) - self.assertEqual(len(trs), 0) - cnx.close() + with self.new_access('member').web_request() as req: + # fetch the entity using the new session + trs = list(req.user.cw_adapt_to('IWorkflowable').possible_transitions()) + self.assertEqual(len(trs), 0) def _test_manager_deactivate(self, user): iworkflowable = user.cw_adapt_to('IWorkflowable') @@ -144,90 +146,93 @@ return trinfo def test_change_state(self): - user = self.user() - iworkflowable = user.cw_adapt_to('IWorkflowable') - iworkflowable.change_state('deactivated', comment=u'deactivate user') - trinfo = self._test_manager_deactivate(user) - self.assertEqual(trinfo.transition, None) + with self.admin_access.client_cnx() as cnx: + user = cnx.user + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.change_state('deactivated', comment=u'deactivate user') + trinfo = self._test_manager_deactivate(user) + self.assertEqual(trinfo.transition, None) def test_set_in_state_bad_wf(self): - wf = add_wf(self, 'CWUser') - s = wf.add_state(u'foo', initial=True) - self.commit() - with self.session.security_enabled(write=False): - with self.assertRaises(ValidationError) as cm: - self.session.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s', - {'x': self.user().eid, 's': s.eid}) - self.assertEqual(cm.exception.errors, {'in_state-subject': "state doesn't belong to entity's workflow. " - "You may want to set a custom workflow for this entity first."}) + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + s = wf.add_state(u'foo', initial=True) + shell.commit() + with self.admin_access.repo_cnx() as cnx: + with cnx.security_enabled(write=False): + with self.assertRaises(ValidationError) as cm: + cnx.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s', + {'x': cnx.user.eid, 's': s.eid}) + self.assertEqual(cm.exception.errors, {'in_state-subject': "state doesn't belong to entity's workflow. 
" + "You may want to set a custom workflow for this entity first."}) def test_fire_transition(self): - user = self.user() - iworkflowable = user.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate', comment=u'deactivate user') - user.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, 'deactivated') - self._test_manager_deactivate(user) - trinfo = self._test_manager_deactivate(user) - self.assertEqual(trinfo.transition.name, 'deactivate') + with self.admin_access.client_cnx() as cnx: + user = cnx.user + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate', comment=u'deactivate user') + user.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, 'deactivated') + self._test_manager_deactivate(user) + trinfo = self._test_manager_deactivate(user) + self.assertEqual(trinfo.transition.name, 'deactivate') def test_goback_transition(self): - req = self.request() - wf = req.user.cw_adapt_to('IWorkflowable').current_workflow - asleep = wf.add_state('asleep') - wf.add_transition('rest', (wf.state_by_name('activated'), - wf.state_by_name('deactivated')), - asleep) - wf.add_transition('wake up', asleep) - user = self.create_user(req, 'stduser') - iworkflowable = user.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('rest') - self.commit() - iworkflowable.fire_transition('wake up') - self.commit() - self.assertEqual(iworkflowable.state, 'activated') - iworkflowable.fire_transition('deactivate') - self.commit() - iworkflowable.fire_transition('rest') - self.commit() - iworkflowable.fire_transition('wake up') - self.commit() - user.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, 'deactivated') + with self.admin_access.web_request() as req: + wf = req.user.cw_adapt_to('IWorkflowable').current_workflow + asleep = wf.add_state('asleep') + wf.add_transition('rest', (wf.state_by_name('activated'), + wf.state_by_name('deactivated')), + asleep) + wf.add_transition('wake up', asleep) + user = self.create_user(req, 'stduser') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('rest') + req.cnx.commit() + iworkflowable.fire_transition('wake up') + req.cnx.commit() + self.assertEqual(iworkflowable.state, 'activated') + iworkflowable.fire_transition('deactivate') + req.cnx.commit() + iworkflowable.fire_transition('rest') + req.cnx.commit() + iworkflowable.fire_transition('wake up') + req.cnx.commit() + user.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, 'deactivated') # XXX test managers can change state without matching transition def _test_stduser_deactivate(self): - ueid = self.member.eid - req = self.request() - self.create_user(req, 'tutu') - cnx = self.login('tutu') - req = self.request() - iworkflowable = req.entity_from_eid(self.member.eid).cw_adapt_to('IWorkflowable') - with self.assertRaises(ValidationError) as cm: + with self.admin_access.repo_cnx() as cnx: + self.create_user(cnx, 'tutu') + with self.new_access('tutu').web_request() as req: + iworkflowable = req.entity_from_eid(self.member_eid).cw_adapt_to('IWorkflowable') + with self.assertRaises(ValidationError) as cm: + iworkflowable.fire_transition('deactivate') + self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"}) + with self.new_access('member').web_request() as req: + iworkflowable = req.entity_from_eid(self.member_eid).cw_adapt_to('IWorkflowable') iworkflowable.fire_transition('deactivate') - self.assertEqual(cm.exception.errors, 
{'by_transition-subject': "transition may not be fired"}) - cnx.close() - cnx = self.login('member') - req = self.request() - iworkflowable = req.entity_from_eid(self.member.eid).cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate') - cnx.commit() - with self.assertRaises(ValidationError) as cm: - iworkflowable.fire_transition('activate') - self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"}) - cnx.close() + req.cnx.commit() + with self.assertRaises(ValidationError) as cm: + iworkflowable.fire_transition('activate') + self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"}) def test_fire_transition_owned_by(self): - self.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' - 'X expression "X owned_by U", T condition X ' - 'WHERE T name "deactivate"') + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' + 'X expression "X owned_by U", T condition X ' + 'WHERE T name "deactivate"') + cnx.commit() self._test_stduser_deactivate() def test_fire_transition_has_update_perm(self): - self.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' - 'X expression "U has_update_permission X", T condition X ' - 'WHERE T name "deactivate"') + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' + 'X expression "U has_update_permission X", T condition X ' + 'WHERE T name "deactivate"') + cnx.commit() self._test_stduser_deactivate() def test_swf_base(self): @@ -250,334 +255,357 @@ +--------+ """ # sub-workflow - swf = add_wf(self, 'CWGroup', name='subworkflow') - swfstate1 = swf.add_state(u'swfstate1', initial=True) - swfstate2 = swf.add_state(u'swfstate2') - swfstate3 = swf.add_state(u'swfstate3') - tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2) - tr2 = swf.add_transition(u'tr2', (swfstate1,), swfstate3) - # main workflow - mwf = add_wf(self, 'CWGroup', name='main workflow', default=True) - state1 = mwf.add_state(u'state1', initial=True) - state2 = mwf.add_state(u'state2') - state3 = mwf.add_state(u'state3') - swftr1 = mwf.add_wftransition(u'swftr1', swf, state1, - [(swfstate2, state2), (swfstate3, state3)]) - self.assertEqual(swftr1.destination(None).eid, swfstate1.eid) + with self.admin_access.shell() as shell: + swf = add_wf(shell, 'CWGroup', name='subworkflow') + swfstate1 = swf.add_state(u'swfstate1', initial=True) + swfstate2 = swf.add_state(u'swfstate2') + swfstate3 = swf.add_state(u'swfstate3') + tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2) + tr2 = swf.add_transition(u'tr2', (swfstate1,), swfstate3) + # main workflow + mwf = add_wf(shell, 'CWGroup', name='main workflow', default=True) + state1 = mwf.add_state(u'state1', initial=True) + state2 = mwf.add_state(u'state2') + state3 = mwf.add_state(u'state3') + swftr1 = mwf.add_wftransition(u'swftr1', swf, state1, + [(swfstate2, state2), (swfstate3, state3)]) + swf.cw_clear_all_caches() + self.assertEqual(swftr1.destination(None).eid, swfstate1.eid) # workflows built, begin test - group = self.request().create_entity('CWGroup', name=u'grp1') - self.commit() - iworkflowable = group.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.current_state.eid, state1.eid) - self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.subworkflow_input_transition(), None) - 
iworkflowable.fire_transition('swftr1', u'go') - self.commit() - group.cw_clear_all_caches() - self.assertEqual(iworkflowable.current_state.eid, swfstate1.eid) - self.assertEqual(iworkflowable.current_workflow.eid, swf.eid) - self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.subworkflow_input_transition().eid, swftr1.eid) - iworkflowable.fire_transition('tr1', u'go') - self.commit() - group.cw_clear_all_caches() - self.assertEqual(iworkflowable.current_state.eid, state2.eid) - self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.subworkflow_input_transition(), None) - # force back to swfstate1 is impossible since we can't any more find - # subworkflow input transition - with self.assertRaises(ValidationError) as cm: - iworkflowable.change_state(swfstate1, u'gadget') - self.assertEqual(cm.exception.errors, {'to_state-subject': "state doesn't belong to entity's workflow"}) - self.rollback() - # force back to state1 - iworkflowable.change_state('state1', u'gadget') - iworkflowable.fire_transition('swftr1', u'au') - group.cw_clear_all_caches() - iworkflowable.fire_transition('tr2', u'chapeau') - self.commit() - group.cw_clear_all_caches() - self.assertEqual(iworkflowable.current_state.eid, state3.eid) - self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) - self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) - self.assertListEqual(parse_hist(iworkflowable.workflow_history), - [('state1', 'swfstate1', 'swftr1', 'go'), - ('swfstate1', 'swfstate2', 'tr1', 'go'), - ('swfstate2', 'state2', 'swftr1', 'exiting from subworkflow subworkflow'), - ('state2', 'state1', None, 'gadget'), - ('state1', 'swfstate1', 'swftr1', 'au'), - ('swfstate1', 'swfstate3', 'tr2', 'chapeau'), - ('swfstate3', 'state3', 'swftr1', 'exiting from subworkflow subworkflow'), - ]) + with self.admin_access.web_request() as req: + group = req.create_entity('CWGroup', name=u'grp1') + req.cnx.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.current_state.eid, state1.eid) + self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.subworkflow_input_transition(), None) + iworkflowable.fire_transition('swftr1', u'go') + req.cnx.commit() + group.cw_clear_all_caches() + self.assertEqual(iworkflowable.current_state.eid, swfstate1.eid) + self.assertEqual(iworkflowable.current_workflow.eid, swf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.subworkflow_input_transition().eid, swftr1.eid) + iworkflowable.fire_transition('tr1', u'go') + req.cnx.commit() + group.cw_clear_all_caches() + self.assertEqual(iworkflowable.current_state.eid, state2.eid) + self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.subworkflow_input_transition(), None) + # force back to swfstate1 is impossible since we can't any more find + # subworkflow input transition + with self.assertRaises(ValidationError) as cm: + iworkflowable.change_state(swfstate1, u'gadget') + self.assertEqual(cm.exception.errors, {'to_state-subject': "state doesn't belong to entity's workflow"}) + req.cnx.rollback() + # force back to state1 + iworkflowable.change_state('state1', u'gadget') + iworkflowable.fire_transition('swftr1', u'au') + 
group.cw_clear_all_caches() + iworkflowable.fire_transition('tr2', u'chapeau') + req.cnx.commit() + group.cw_clear_all_caches() + self.assertEqual(iworkflowable.current_state.eid, state3.eid) + self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertListEqual(parse_hist(iworkflowable.workflow_history), + [('state1', 'swfstate1', 'swftr1', 'go'), + ('swfstate1', 'swfstate2', 'tr1', 'go'), + ('swfstate2', 'state2', 'swftr1', 'exiting from subworkflow subworkflow'), + ('state2', 'state1', None, 'gadget'), + ('state1', 'swfstate1', 'swftr1', 'au'), + ('swfstate1', 'swfstate3', 'tr2', 'chapeau'), + ('swfstate3', 'state3', 'swftr1', 'exiting from subworkflow subworkflow'), + ]) def test_swf_exit_consistency(self): - # sub-workflow - swf = add_wf(self, 'CWGroup', name='subworkflow') - swfstate1 = swf.add_state(u'swfstate1', initial=True) - swfstate2 = swf.add_state(u'swfstate2') - tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2) - # main workflow - mwf = add_wf(self, 'CWGroup', name='main workflow', default=True) - state1 = mwf.add_state(u'state1', initial=True) - state2 = mwf.add_state(u'state2') - state3 = mwf.add_state(u'state3') - mwf.add_wftransition(u'swftr1', swf, state1, - [(swfstate2, state2), (swfstate2, state3)]) - with self.assertRaises(ValidationError) as cm: - self.commit() - self.assertEqual(cm.exception.errors, {'subworkflow_exit-subject': u"can't have multiple exits on the same state"}) + with self.admin_access.shell() as shell: + # sub-workflow + swf = add_wf(shell, 'CWGroup', name='subworkflow') + swfstate1 = swf.add_state(u'swfstate1', initial=True) + swfstate2 = swf.add_state(u'swfstate2') + tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2) + # main workflow + mwf = add_wf(shell, 'CWGroup', name='main workflow', default=True) + state1 = mwf.add_state(u'state1', initial=True) + state2 = mwf.add_state(u'state2') + state3 = mwf.add_state(u'state3') + mwf.add_wftransition(u'swftr1', swf, state1, + [(swfstate2, state2), (swfstate2, state3)]) + with self.assertRaises(ValidationError) as cm: + shell.commit() + self.assertEqual(cm.exception.errors, {'subworkflow_exit-subject': u"can't have multiple exits on the same state"}) def test_swf_fire_in_a_row(self): - # sub-workflow - subwf = add_wf(self, 'CWGroup', name='subworkflow') - xsigning = subwf.add_state('xsigning', initial=True) - xaborted = subwf.add_state('xaborted') - xsigned = subwf.add_state('xsigned') - xabort = subwf.add_transition('xabort', (xsigning,), xaborted) - xsign = subwf.add_transition('xsign', (xsigning,), xsigning) - xcomplete = subwf.add_transition('xcomplete', (xsigning,), xsigned, - type=u'auto') - # main workflow - twf = add_wf(self, 'CWGroup', name='mainwf', default=True) - created = twf.add_state(_('created'), initial=True) - identified = twf.add_state(_('identified')) - released = twf.add_state(_('released')) - closed = twf.add_state(_('closed')) - twf.add_wftransition(_('identify'), subwf, (created,), - [(xsigned, identified), (xaborted, created)]) - twf.add_wftransition(_('release'), subwf, (identified,), - [(xsigned, released), (xaborted, identified)]) - twf.add_wftransition(_('close'), subwf, (released,), - [(xsigned, closed), (xaborted, released)]) - self.commit() - group = self.request().create_entity('CWGroup', name=u'grp1') - self.commit() - iworkflowable = group.cw_adapt_to('IWorkflowable') - for trans in ('identify', 'release', 'close'): - iworkflowable.fire_transition(trans) - self.commit() + with 
self.admin_access.shell() as shell: + # sub-workflow + subwf = add_wf(shell, 'CWGroup', name='subworkflow') + xsigning = subwf.add_state('xsigning', initial=True) + xaborted = subwf.add_state('xaborted') + xsigned = subwf.add_state('xsigned') + xabort = subwf.add_transition('xabort', (xsigning,), xaborted) + xsign = subwf.add_transition('xsign', (xsigning,), xsigning) + xcomplete = subwf.add_transition('xcomplete', (xsigning,), xsigned, + type=u'auto') + # main workflow + twf = add_wf(shell, 'CWGroup', name='mainwf', default=True) + created = twf.add_state(_('created'), initial=True) + identified = twf.add_state(_('identified')) + released = twf.add_state(_('released')) + closed = twf.add_state(_('closed')) + twf.add_wftransition(_('identify'), subwf, (created,), + [(xsigned, identified), (xaborted, created)]) + twf.add_wftransition(_('release'), subwf, (identified,), + [(xsigned, released), (xaborted, identified)]) + twf.add_wftransition(_('close'), subwf, (released,), + [(xsigned, closed), (xaborted, released)]) + shell.commit() + with self.admin_access.repo_cnx() as cnx: + group = cnx.create_entity('CWGroup', name=u'grp1') + cnx.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') + for trans in ('identify', 'release', 'close'): + iworkflowable.fire_transition(trans) + cnx.commit() def test_swf_magic_tr(self): - # sub-workflow - subwf = add_wf(self, 'CWGroup', name='subworkflow') - xsigning = subwf.add_state('xsigning', initial=True) - xaborted = subwf.add_state('xaborted') - xsigned = subwf.add_state('xsigned') - xabort = subwf.add_transition('xabort', (xsigning,), xaborted) - xsign = subwf.add_transition('xsign', (xsigning,), xsigned) - # main workflow - twf = add_wf(self, 'CWGroup', name='mainwf', default=True) - created = twf.add_state(_('created'), initial=True) - identified = twf.add_state(_('identified')) - released = twf.add_state(_('released')) - twf.add_wftransition(_('identify'), subwf, created, - [(xaborted, None), (xsigned, identified)]) - twf.add_wftransition(_('release'), subwf, identified, - [(xaborted, None)]) - self.commit() - group = self.request().create_entity('CWGroup', name=u'grp1') - self.commit() - iworkflowable = group.cw_adapt_to('IWorkflowable') - for trans, nextstate in (('identify', 'xsigning'), - ('xabort', 'created'), - ('identify', 'xsigning'), - ('xsign', 'identified'), - ('release', 'xsigning'), - ('xabort', 'identified') - ): - iworkflowable.fire_transition(trans) - self.commit() - group.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, nextstate) + with self.admin_access.shell() as shell: + # sub-workflow + subwf = add_wf(shell, 'CWGroup', name='subworkflow') + xsigning = subwf.add_state('xsigning', initial=True) + xaborted = subwf.add_state('xaborted') + xsigned = subwf.add_state('xsigned') + xabort = subwf.add_transition('xabort', (xsigning,), xaborted) + xsign = subwf.add_transition('xsign', (xsigning,), xsigned) + # main workflow + twf = add_wf(shell, 'CWGroup', name='mainwf', default=True) + created = twf.add_state(_('created'), initial=True) + identified = twf.add_state(_('identified')) + released = twf.add_state(_('released')) + twf.add_wftransition(_('identify'), subwf, created, + [(xaborted, None), (xsigned, identified)]) + twf.add_wftransition(_('release'), subwf, identified, + [(xaborted, None)]) + shell.commit() + with self.admin_access.web_request() as req: + group = req.create_entity('CWGroup', name=u'grp1') + req.cnx.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') + for trans, nextstate in 
(('identify', 'xsigning'), + ('xabort', 'created'), + ('identify', 'xsigning'), + ('xsign', 'identified'), + ('release', 'xsigning'), + ('xabort', 'identified') + ): + iworkflowable.fire_transition(trans) + req.cnx.commit() + group.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, nextstate) class CustomWorkflowTC(CubicWebTC): def setup_database(self): - req = self.request() - self.member = self.create_user(req, 'member') + with self.admin_access.repo_cnx() as cnx: + self.member_eid = self.create_user(cnx, 'member').eid def test_custom_wf_replace_state_no_history(self): """member in inital state with no previous history, state is simply redirected when changing workflow """ - wf = add_wf(self, 'CWUser') - wf.add_state('asleep', initial=True) - self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member.eid}) - self.member.cw_clear_all_caches() - iworkflowable = self.member.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.state, 'activated')# no change before commit - self.commit() - self.member.cw_clear_all_caches() - self.assertEqual(iworkflowable.current_workflow.eid, wf.eid) - self.assertEqual(iworkflowable.state, 'asleep') - self.assertEqual(iworkflowable.workflow_history, ()) + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + wf.add_state('asleep', initial=True) + with self.admin_access.web_request() as req: + req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + member = req.entity_from_eid(self.member_eid) + iworkflowable = member.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'activated') # no change before commit + req.cnx.commit() + member.cw_clear_all_caches() + self.assertEqual(iworkflowable.current_workflow.eid, wf.eid) + self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual(iworkflowable.workflow_history, ()) def test_custom_wf_replace_state_keep_history(self): """member in inital state with some history, state is redirected and state change is recorded to history """ - iworkflowable = self.member.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate') - iworkflowable.fire_transition('activate') - wf = add_wf(self, 'CWUser') - wf.add_state('asleep', initial=True) - self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member.eid}) - self.commit() - self.member.cw_clear_all_caches() - self.assertEqual(iworkflowable.current_workflow.eid, wf.eid) - self.assertEqual(iworkflowable.state, 'asleep') - self.assertEqual(parse_hist(iworkflowable.workflow_history), - [('activated', 'deactivated', 'deactivate', None), - ('deactivated', 'activated', 'activate', None), - ('activated', 'asleep', None, 'workflow changed to "CWUser"')]) + with self.admin_access.web_request() as req: + member = req.entity_from_eid(self.member_eid) + iworkflowable = member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + iworkflowable.fire_transition('activate') + req.cnx.commit() + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + wf.add_state('asleep', initial=True) + shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + with self.admin_access.web_request() as req: + member = req.entity_from_eid(self.member_eid) + iworkflowable = member.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.current_workflow.eid, wf.eid) + 
self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual(parse_hist(iworkflowable.workflow_history), + [('activated', 'deactivated', 'deactivate', None), + ('deactivated', 'activated', 'activate', None), + ('activated', 'asleep', None, 'workflow changed to "CWUser"')]) def test_custom_wf_no_initial_state(self): """try to set a custom workflow which has no initial state""" - iworkflowable = self.member.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate') - wf = add_wf(self, 'CWUser') - wf.add_state('asleep') - self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member.eid}) - with self.assertRaises(ValidationError) as cm: - self.commit() - self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u'workflow has no initial state'}) + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + wf.add_state('asleep') + shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + with self.assertRaises(ValidationError) as cm: + shell.commit() + self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u'workflow has no initial state'}) def test_custom_wf_bad_etype(self): """try to set a custom workflow which doesn't apply to entity type""" - wf = add_wf(self, 'Company') - wf.add_state('asleep', initial=True) - self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member.eid}) - with self.assertRaises(ValidationError) as cm: - self.commit() - self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u"workflow isn't a workflow for this type"}) + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'Company') + wf.add_state('asleep', initial=True) + shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + with self.assertRaises(ValidationError) as cm: + shell.commit() + self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u"workflow isn't a workflow for this type"}) def test_del_custom_wf(self): """member in some state shared by the new workflow, nothing has to be done """ - iworkflowable = self.member.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate') - wf = add_wf(self, 'CWUser') - wf.add_state('asleep', initial=True) - self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member.eid}) - self.commit() - self.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member.eid}) - self.member.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, 'asleep')# no change before commit - self.commit() - self.member.cw_clear_all_caches() - self.assertEqual(iworkflowable.current_workflow.name, "default user workflow") - self.assertEqual(iworkflowable.state, 'activated') - self.assertEqual(parse_hist(iworkflowable.workflow_history), - [('activated', 'deactivated', 'deactivate', None), - ('deactivated', 'asleep', None, 'workflow changed to "CWUser"'), - ('asleep', 'activated', None, 'workflow changed to "default user workflow"'),]) + with self.admin_access.web_request() as req: + member = req.entity_from_eid(self.member_eid) + iworkflowable = member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + req.cnx.commit() + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + wf.add_state('asleep', initial=True) + shell.rqlexec('SET X 
custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + shell.commit() + with self.admin_access.web_request() as req: + req.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': self.member_eid}) + member = req.entity_from_eid(self.member_eid) + iworkflowable = member.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'asleep')# no change before commit + req.cnx.commit() + member.cw_clear_all_caches() + self.assertEqual(iworkflowable.current_workflow.name, "default user workflow") + self.assertEqual(iworkflowable.state, 'activated') + self.assertEqual(parse_hist(iworkflowable.workflow_history), + [('activated', 'deactivated', 'deactivate', None), + ('deactivated', 'asleep', None, 'workflow changed to "CWUser"'), + ('asleep', 'activated', None, 'workflow changed to "default user workflow"'),]) class AutoTransitionTC(CubicWebTC): def setup_custom_wf(self): - wf = add_wf(self, 'CWUser') - asleep = wf.add_state('asleep', initial=True) - dead = wf.add_state('dead') - wf.add_transition('rest', asleep, asleep) - wf.add_transition('sick', asleep, dead, type=u'auto', - conditions=({'expr': u'X surname "toto"', - 'mainvars': u'X'},)) + with self.admin_access.shell() as shell: + wf = add_wf(shell, 'CWUser') + asleep = wf.add_state('asleep', initial=True) + dead = wf.add_state('dead') + wf.add_transition('rest', asleep, asleep) + wf.add_transition('sick', asleep, dead, type=u'auto', + conditions=({'expr': u'X surname "toto"', + 'mainvars': u'X'},)) return wf def test_auto_transition_fired(self): wf = self.setup_custom_wf() - req = self.request() - user = self.create_user(req, 'member') - iworkflowable = user.cw_adapt_to('IWorkflowable') - self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': user.eid}) - self.commit() - user.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, 'asleep') - self.assertEqual([t.name for t in iworkflowable.possible_transitions()], - ['rest']) - iworkflowable.fire_transition('rest') - self.commit() - user.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, 'asleep') - self.assertEqual([t.name for t in iworkflowable.possible_transitions()], - ['rest']) - self.assertEqual(parse_hist(iworkflowable.workflow_history), - [('asleep', 'asleep', 'rest', None)]) - user.cw_set(surname=u'toto') # fulfill condition - self.commit() - iworkflowable.fire_transition('rest') - self.commit() - user.cw_clear_all_caches() - self.assertEqual(iworkflowable.state, 'dead') - self.assertEqual(parse_hist(iworkflowable.workflow_history), - [('asleep', 'asleep', 'rest', None), - ('asleep', 'asleep', 'rest', None), - ('asleep', 'dead', 'sick', None),]) + with self.admin_access.web_request() as req: + user = self.create_user(req, 'member') + iworkflowable = user.cw_adapt_to('IWorkflowable') + req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + {'wf': wf.eid, 'x': user.eid}) + req.cnx.commit() + user.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual([t.name for t in iworkflowable.possible_transitions()], + ['rest']) + iworkflowable.fire_transition('rest') + req.cnx.commit() + user.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual([t.name for t in iworkflowable.possible_transitions()], + ['rest']) + self.assertEqual(parse_hist(iworkflowable.workflow_history), + [('asleep', 'asleep', 'rest', None)]) + user.cw_set(surname=u'toto') # fulfill 
condition + req.cnx.commit() + iworkflowable.fire_transition('rest') + req.cnx.commit() + user.cw_clear_all_caches() + self.assertEqual(iworkflowable.state, 'dead') + self.assertEqual(parse_hist(iworkflowable.workflow_history), + [('asleep', 'asleep', 'rest', None), + ('asleep', 'asleep', 'rest', None), + ('asleep', 'dead', 'sick', None),]) def test_auto_transition_custom_initial_state_fired(self): wf = self.setup_custom_wf() - req = self.request() - user = self.create_user(req, 'member', surname=u'toto') - self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', + with self.admin_access.web_request() as req: + user = self.create_user(req, 'member', surname=u'toto') + req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': user.eid}) - self.commit() - iworkflowable = user.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.state, 'dead') + req.cnx.commit() + iworkflowable = user.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'dead') def test_auto_transition_initial_state_fired(self): - wf = self.execute('Any WF WHERE ET default_workflow WF, ' - 'ET name %(et)s', {'et': 'CWUser'}).get_entity(0, 0) - dead = wf.add_state('dead') - wf.add_transition('sick', wf.state_by_name('activated'), dead, - type=u'auto', conditions=({'expr': u'X surname "toto"', - 'mainvars': u'X'},)) - self.commit() - req = self.request() - user = self.create_user(req, 'member', surname=u'toto') - self.commit() - iworkflowable = user.cw_adapt_to('IWorkflowable') - self.assertEqual(iworkflowable.state, 'dead') + with self.admin_access.web_request() as req: + wf = req.execute('Any WF WHERE ET default_workflow WF, ' + 'ET name %(et)s', {'et': 'CWUser'}).get_entity(0, 0) + dead = wf.add_state('dead') + wf.add_transition('sick', wf.state_by_name('activated'), dead, + type=u'auto', conditions=({'expr': u'X surname "toto"', + 'mainvars': u'X'},)) + req.cnx.commit() + with self.admin_access.web_request() as req: + user = self.create_user(req, 'member', surname=u'toto') + req.cnx.commit() + iworkflowable = user.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'dead') class WorkflowHooksTC(CubicWebTC): def setUp(self): CubicWebTC.setUp(self) - req = self.request() - self.wf = req.user.cw_adapt_to('IWorkflowable').current_workflow - self.s_activated = self.wf.state_by_name('activated').eid - self.s_deactivated = self.wf.state_by_name('deactivated').eid - self.s_dummy = self.wf.add_state(u'dummy').eid - self.wf.add_transition(u'dummy', (self.s_deactivated,), self.s_dummy) - ueid = self.create_user(req, 'stduser', commit=False).eid - # test initial state is set - rset = self.execute('Any N WHERE S name N, X in_state S, X eid %(x)s', - {'x' : ueid}) - self.assertFalse(rset, rset.rows) - self.commit() - initialstate = self.execute('Any N WHERE S name N, X in_state S, X eid %(x)s', - {'x' : ueid})[0][0] - self.assertEqual(initialstate, u'activated') - # give access to users group on the user's wf transitions - # so we can test wf enforcing on euser (managers don't have anymore this - # enforcement - self.execute('SET X require_group G ' - 'WHERE G name "users", X transition_of WF, WF eid %(wf)s', - {'wf': self.wf.eid}) - self.commit() + with self.admin_access.web_request() as req: + self.wf = req.user.cw_adapt_to('IWorkflowable').current_workflow + self.s_activated = self.wf.state_by_name('activated').eid + self.s_deactivated = self.wf.state_by_name('deactivated').eid + self.s_dummy = self.wf.add_state(u'dummy').eid + 
self.wf.add_transition(u'dummy', (self.s_deactivated,), self.s_dummy) + ueid = self.create_user(req, 'stduser', commit=False).eid + # test initial state is set + rset = req.execute('Any N WHERE S name N, X in_state S, X eid %(x)s', + {'x' : ueid}) + self.assertFalse(rset, rset.rows) + req.cnx.commit() + initialstate = req.execute('Any N WHERE S name N, X in_state S, X eid %(x)s', + {'x' : ueid})[0][0] + self.assertEqual(initialstate, u'activated') + # give access to users group on the user's wf transitions + # so we can test wf enforcing on euser (managers don't have anymore this + # enforcement + req.execute('SET X require_group G ' + 'WHERE G name "users", X transition_of WF, WF eid %(wf)s', + {'wf': self.wf.eid}) + req.cnx.commit() # XXX currently, we've to rely on hooks to set initial state, or to use execute # def test_initial_state(self): @@ -602,42 +630,37 @@ return ' '.join(lmsg) def test_transition_checking1(self): - cnx = self.login('stduser') - user = cnx.user(self.session) - iworkflowable = user.cw_adapt_to('IWorkflowable') - with self.assertRaises(ValidationError) as cm: - iworkflowable.fire_transition('activate') - self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']), - u"transition isn't allowed from") - cnx.close() + with self.new_access('stduser').repo_cnx() as cnx: + user = cnx.user + iworkflowable = user.cw_adapt_to('IWorkflowable') + with self.assertRaises(ValidationError) as cm: + iworkflowable.fire_transition('activate') + self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']), + u"transition isn't allowed from") def test_transition_checking2(self): - cnx = self.login('stduser') - user = cnx.user(self.session) - iworkflowable = user.cw_adapt_to('IWorkflowable') - with self.assertRaises(ValidationError) as cm: - iworkflowable.fire_transition('dummy') - self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']), - u"transition isn't allowed from") - cnx.close() + with self.new_access('stduser').repo_cnx() as cnx: + user = cnx.user + iworkflowable = user.cw_adapt_to('IWorkflowable') + with self.assertRaises(ValidationError) as cm: + iworkflowable.fire_transition('dummy') + self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']), + u"transition isn't allowed from") def test_transition_checking3(self): - with self.login('stduser') as cnx: - session = self.session - user = self.user() + with self.new_access('stduser').repo_cnx() as cnx: + user = cnx.user iworkflowable = user.cw_adapt_to('IWorkflowable') iworkflowable.fire_transition('deactivate') - session.commit() - session.set_cnxset() + cnx.commit() with self.assertRaises(ValidationError) as cm: iworkflowable.fire_transition('deactivate') self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']), u"transition isn't allowed from") - session.rollback() - session.set_cnxset() + cnx.rollback() # get back now iworkflowable.fire_transition('activate') - session.commit() + cnx.commit() if __name__ == '__main__': diff -r 84738d495ffd -r 793377697c81 entity.py --- a/entity.py Wed Sep 24 17:35:59 2014 +0200 +++ b/entity.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
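
The test migrations above all follow one 3.19 idiom: connections are explicit context managers obtained from self.admin_access / self.new_access(), and entities are bound to the connection that loaded them, so fixtures keep eids rather than entity objects. A minimal sketch of that idiom, for reference only (the test class and its names are illustrative, not taken from this patch; the APIs used are the ones exercised by the hunks above):

    from cubicweb.devtools.testlib import CubicWebTC

    class MemberWorkflowTC(CubicWebTC):  # hypothetical example class

        def setup_database(self):
            with self.admin_access.repo_cnx() as cnx:
                # keep the eid, not the entity: the entity is bound to `cnx`,
                # which is closed when this block exits
                self.member_eid = self.create_user(cnx, 'member').eid

        def test_deactivate(self):
            with self.admin_access.repo_cnx() as cnx:
                # re-fetch the entity through the current connection
                member = cnx.entity_from_eid(self.member_eid)
                member.cw_adapt_to('IWorkflowable').fire_transition('deactivate')
                cnx.commit()
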
@@ -344,7 +344,8 @@
             cls.warning('skipping fetch_attr %s defined in %s (not found in schema)',
                         attr, cls.__regid__)
             continue
-        rdef = eschema.rdef(attr)
+        # XXX takefirst=True to remove warning triggered by ambiguous inlined relations
+        rdef = eschema.rdef(attr, takefirst=True)
         if not user.matching_groups(rdef.get_groups('read')):
             continue
         if rschema.final or rdef.cardinality[0] in '?1':
@@ -424,8 +425,10 @@
             needcheck = not cls.e_schema.has_unique_values(mainattr)
         else:
             for rschema in cls.e_schema.subject_relations():
-                if rschema.final and rschema != 'eid' \
-                        and cls.e_schema.has_unique_values(rschema):
+                if (rschema.final
+                        and rschema != 'eid'
+                        and cls.e_schema.has_unique_values(rschema)
+                        and cls.e_schema.rdef(rschema.type).cardinality[0] == '1'):
                     mainattr = str(rschema)
                     needcheck = False
                     break
@@ -551,14 +554,12 @@
     def _cw_update_attr_cache(self, attrcache):
         # if context is a repository session, don't consider dont-cache-attrs as
-        # the instance already hold modified values and loosing them could
+        # the instance already holds modified values and losing them could
         # introduce severe problems
-        get_set = partial(self._cw.get_shared_data, default=(), txdata=True,
-                          pop=True)
-        uncached_attrs = set()
-        uncached_attrs.update(get_set('%s.storage-special-process-attrs' % self.eid))
+        trdata = self._cw.transaction_data
+        uncached_attrs = trdata.get('%s.storage-special-process-attrs' % self.eid, set())
         if self._cw.is_request:
-            uncached_attrs.update(get_set('%s.dont-cache-attrs' % self.eid))
+            uncached_attrs.update(trdata.get('%s.dont-cache-attrs' % self.eid, set()))
         for attr in uncached_attrs:
             attrcache.pop(attr, None)
             self.cw_attr_cache.pop(attr, None)
@@ -633,11 +634,9 @@
     @cached
     def cw_metainformation(self):
-        res = self._cw.describe(self.eid, asdict=True)
-        # use 'asource' and not 'source' since this is the actual source,
-        # while 'source' is the physical source (where it's stored)
-        res['source'] = self._cw.source_defs()[res.pop('asource')]
-        return res
+        metas = self._cw.entity_metas(self.eid)
+        metas['source'] = self._cw.source_defs()[metas['source']]
+        return metas

     def cw_check_perm(self, action):
         self.e_schema.check_perm(self._cw, action, eid=self.eid)
@@ -797,8 +796,9 @@
             # skip already defined relations
             if getattr(self, rschema.type):
                 continue
+            # XXX takefirst=True to remove warning triggered by ambiguous relations
+            rdef = self.e_schema.rdef(rschema, takefirst=True)
             # skip composite relation
-            rdef = self.e_schema.rdef(rschema)
             if rdef.composite:
                 continue
             # skip relation with card in ?1 else we either change the copied
@@ -817,7 +817,8 @@
                 continue
             if rschema.type in skip_copy_for['object']:
                 continue
-            rdef = self.e_schema.rdef(rschema, 'object')
+            # XXX takefirst=True to remove warning triggered by ambiguous relations
+            rdef = self.e_schema.rdef(rschema, 'object', takefirst=True)
             # skip composite relation
             if rdef.composite:
                 continue
@@ -1076,6 +1077,25 @@

     # generic vocabulary methods ##############################################

+    def cw_linkable_rql(self, rtype, targettype, role, ordermethod=None,
+                        vocabconstraints=True, lt_infos={}, limit=None):
+        """build a rql to fetch `targettype` entities either related or unrelated
+        to this entity using the (rtype, role) relation.
+
+        Consider relation permissions so that returned entities may actually be
+        linked by `rtype`.
+
+        `lt_infos` are supplementary information, usually coming from the __linkto
+        parameter, that can help further restrict the results in case the current
+        entity is not yet created.
It is a dict describing entities the current + entity will be linked to, which keys are (rtype, role) tuples and values + are a list of eids. + """ + return self._cw_compute_linkable_rql(rtype, targettype, role, ordermethod=None, + vocabconstraints=vocabconstraints, + lt_infos=lt_infos, limit=limit, + unrelated_only=False) + def cw_unrelated_rql(self, rtype, targettype, role, ordermethod=None, vocabconstraints=True, lt_infos={}, limit=None): """build a rql to fetch `targettype` entities unrelated to this entity @@ -1090,6 +1110,21 @@ entity will be linked to, which keys are (rtype, role) tuples and values are a list of eids. """ + return self._cw_compute_linkable_rql(rtype, targettype, role, ordermethod=None, + vocabconstraints=vocabconstraints, + lt_infos=lt_infos, limit=limit, + unrelated_only=True) + + def _cw_compute_linkable_rql(self, rtype, targettype, role, ordermethod=None, + vocabconstraints=True, lt_infos={}, limit=None, + unrelated_only=False): + """build a rql to fetch `targettype` entities that may be related to + this entity using the (rtype, role) relation. + + By default (unrelated_only=False), this includes the already linked + entities as well as the unrelated ones. If `unrelated_only` is True, the + rql filters out the already related entities. + """ ordermethod = ordermethod or 'fetch_unrelated_order' rschema = self._cw.vreg.schema.rschema(rtype) rdef = rschema.role_rdef(self.e_schema, targettype, role) @@ -1118,7 +1153,7 @@ else: rel = make_relation(searchedvar, rtype, (variable,), VariableRef) select.add_restriction(Not(rel)) - elif self.has_eid(): + elif self.has_eid() and unrelated_only: # elif we have an eid, we don't want a target entity which is # already linked to ourself through this relation rel = make_relation(subjvar, rtype, (objvar,), VariableRef) diff -r 84738d495ffd -r 793377697c81 etwist/server.py --- a/etwist/server.py Wed Sep 24 17:35:59 2014 +0200 +++ b/etwist/server.py Wed Sep 24 18:04:30 2014 +0200 @@ -47,22 +47,14 @@ # to wait all tasks to be finished for the server to be actually started lc.start(interval, now=False) -def host_prefixed_baseurl(baseurl, host): - scheme, netloc, url, query, fragment = urlsplit(baseurl) - netloc_domain = '.' 
+ '.'.join(netloc.split('.')[-2:])
-    if host.endswith(netloc_domain):
-        netloc = host
-    baseurl = urlunsplit((scheme, netloc, url, query, fragment))
-    return baseurl
-
 class CubicWebRootResource(resource.Resource):
-    def __init__(self, config, vreg=None):
+    def __init__(self, config, repo):
         resource.Resource.__init__(self)
         self.config = config
         # instantiate publisher here and not in init_publisher to get some
         # checks done before daemonization (eg versions consistency)
-        self.appli = CubicWebPublisher(config, vreg=vreg)
+        self.appli = CubicWebPublisher(repo, config)
         self.base_url = config['base-url']
         self.https_url = config['https-url']
         global MAX_POST_LENGTH
@@ -77,6 +69,7 @@
             # if pyro is enabled, we have to register to the pyro name
             # server, create a pyro daemon, and create a task to handle pyro
             # requests
+            self.appli.repo.warning('remote repository access through pyro is deprecated')
             self.pyro_daemon = self.appli.repo.pyro_register()
             self.pyro_listen_timeout = 0.02
             self.appli.repo.looping_task(1, self.pyro_loop_event)
@@ -185,7 +178,7 @@
         path = self.channel._path.split('?', 1)[0].rstrip('/').rsplit('/', 1)[-1]
         self.clientproto = 'HTTP/1.1' # not yet initialized
         self.channel.persistent = 0   # force connection close on cleanup
-        self.setResponseCode(http.BAD_REQUEST)
+        self.setResponseCode(http.REQUEST_ENTITY_TOO_LARGE)
         if path in JSON_PATHS: # XXX better json path detection
             self.setHeader('content-type',"application/json")
             body = json_dumps({'reason': 'request max size exceeded'})
@@ -271,12 +264,20 @@
 LOGGER = getLogger('cubicweb.twisted')
 set_log_methods(CubicWebRootResource, LOGGER)

-def run(config, vreg=None, debug=None):
+def run(config, debug=None, repo=None):
+    # repo may be passed during tests.
+    #
+    # Tests have already created a repo object, so we should not create a new
+    # one. Explicitly passing the repo object avoids relying on the fragile
+    # config.repository() cache. We could imagine making repo a mandatory
+    # argument and receiving it from the starting command directly.
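+    # Illustration only (editor's sketch, not part of the upstream patch):
+    # the two call styles this signature supports --
+    #
+    #     run(config)             # regular start: repo built via config.repository()
+    #     run(config, repo=repo)  # tests: reuse the repository they already built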
if debug is not None: config.debugmode = debug config.check_writeable_uid_directory(config.appdatahome) # create the site - root_resource = CubicWebRootResource(config, vreg=vreg) + if repo is None: + repo = config.repository() + root_resource = CubicWebRootResource(config, repo) website = server.Site(root_resource) # serve it via standard HTTP on port set in the configuration port = config['port'] or 8080 diff -r 84738d495ffd -r 793377697c81 etwist/test/unittest_server.py --- a/etwist/test/unittest_server.py Wed Sep 24 17:35:59 2014 +0200 +++ b/etwist/test/unittest_server.py Wed Sep 24 18:04:30 2014 +0200 @@ -19,41 +19,7 @@ import os, os.path as osp, glob import urllib -from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools.httptest import CubicWebServerTC -from cubicweb.etwist.server import host_prefixed_baseurl - - -class HostPrefixedBaseURLTC(CubicWebTC): - - def _check(self, baseurl, host, waited): - self.assertEqual(host_prefixed_baseurl(baseurl, host), waited, - 'baseurl %s called through host %s should be considered as %s' - % (baseurl, host, waited)) - - def test1(self): - self._check('http://www.cubicweb.org/hg/', 'code.cubicweb.org', - 'http://code.cubicweb.org/hg/') - - def test2(self): - self._check('http://www.cubicweb.org/hg/', 'cubicweb.org', - 'http://www.cubicweb.org/hg/') - - def test3(self): - self._check('http://cubicweb.org/hg/', 'code.cubicweb.org', - 'http://code.cubicweb.org/hg/') - - def test4(self): - self._check('http://www.cubicweb.org/hg/', 'localhost', - 'http://www.cubicweb.org/hg/') - - def test5(self): - self._check('http://www.cubicweb.org/cubes/', 'hg.code.cubicweb.org', - 'http://hg.code.cubicweb.org/cubes/') - - def test6(self): - self._check('http://localhost:8080/hg/', 'code.cubicweb.org', - 'http://localhost:8080/hg/') class ETwistHTTPTC(CubicWebServerTC): diff -r 84738d495ffd -r 793377697c81 ext/test/unittest_rest.py --- a/ext/test/unittest_rest.py Wed Sep 24 17:35:59 2014 +0200 +++ b/ext/test/unittest_rest.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -21,18 +21,23 @@ from cubicweb.ext.rest import rest_publish class RestTC(CubicWebTC): - def context(self): - return self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) + + def context(self, req): + return req.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) def test_eid_role(self): - context = self.context() - self.assertEqual(rest_publish(context, ':eid:`%s`' % context.eid), - '
<p><a class="reference" href="http://testing.fr/cubicweb/cwuser/admin">#%s</a></p>\n' % context.eid)
-        self.assertEqual(rest_publish(context, ':eid:`%s:some text`' % context.eid),
-                         '<p><a class="reference" href="http://testing.fr/cubicweb/cwuser/admin">some text</a></p>\n')
+        with self.admin_access.web_request() as req:
+            context = self.context(req)
+            self.assertEqual(rest_publish(context, ':eid:`%s`' % context.eid),
+                             '<p><a class="reference" href="http://testing.fr/cubicweb/cwuser/admin">'
+                             '#%s</a></p>\n' % context.eid)
+            self.assertEqual(rest_publish(context, ':eid:`%s:some text`' % context.eid),
+                             '<p><a class="reference" href="http://testing.fr/cubicweb/cwuser/admin">'
+                             'some text</a></p>\n')

     def test_bad_rest_no_crash(self):
-        data = rest_publish(self.context(), '''
+        with self.admin_access.web_request() as req:
+            rest_publish(self.context(req), '''
 | card | implication |
 --------------------------
 | 1-1  | N1 = N2     |
@@ -55,159 +60,172 @@

     def test_rql_role_with_vid(self):
-        context = self.context()
-        out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:table`')
-        self.assertTrue(out.endswith('<a href="http://testing.fr/cubicweb/cwuser/anon" title="">anon</a>'
-                                     '\n</tbody></table></div></p>\n'))
+        with self.admin_access.web_request() as req:
+            context = self.context(req)
+            out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:table`')
+            self.assertTrue(out.endswith('<a href="http://testing.fr/cubicweb/cwuser/anon" title="">anon</a>\n'
+                                         '</tbody></table></div></p>\n'))

     def test_rql_role_with_vid_empty_rset(self):
-        context = self.context()
-        out = rest_publish(context, ':rql:`Any X WHERE X is CWUser, X login "nono":table`')
-        self.assertTrue(out.endswith('<p><div class="searchMessage"><strong>No result matching query</strong></div>\n</p>\n'))
+        with self.admin_access.web_request() as req:
+            context = self.context(req)
+            out = rest_publish(context, ':rql:`Any X WHERE X is CWUser, X login "nono":table`')
+            self.assertTrue(out.endswith('<p><div class="searchMessage"><strong>'
+                                         'No result matching query</strong></div>\n</p>\n'))

     def test_rql_role_with_unknown_vid(self):
-        context = self.context()
-        out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:toto`')
-        self.assertTrue(out.startswith("<p>an error occurred while interpreting this rql directive: ObjectNotFound(u'toto',)</p>"))
+        with self.admin_access.web_request() as req:
+            context = self.context(req)
+            out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:toto`')
+            self.assertTrue(out.startswith("<p>an error occurred while interpreting this "
+                                           "rql directive: ObjectNotFound(u'toto',)</p>"))

     def test_rql_role_without_vid(self):
-        context = self.context()
-        out = rest_publish(context, ':rql:`Any X WHERE X is CWUser`')
-        self.assertEqual(out, u'<p><h1>CWUser_plural</h1></p>\n')
+        with self.admin_access.web_request() as req:
+            context = self.context(req)
+            out = rest_publish(context, ':rql:`Any X WHERE X is CWUser`')
+            self.assertEqual(out, u'<p><h1>CWUser_plural</h1>'
+                             '<div class="section"><a href="http://testing.fr/cubicweb/cwuser/admin" title="">'
+                             'admin</a></div>'
+                             '<div class="section"><a href="http://testing.fr/cubicweb/cwuser/anon" title="">'
+                             'anon</a></div></p>\n')

     def test_bookmark_role(self):
-        context = self.context()
-        rset = self.execute('INSERT Bookmark X: X title "hello", X path "/view?rql=Any X WHERE X is CWUser"')
-        eid = rset[0][0]
-        out = rest_publish(context, ':bookmark:`%s`' % eid)
-        self.assertEqual(out, u'<p><h1>CWUser_plural</h1></p>\n')
+        with self.admin_access.web_request() as req:
+            context = self.context(req)
+            rset = req.execute('INSERT Bookmark X: X title "hello", X path '
+                               '"/view?rql=Any X WHERE X is CWUser"')
+            eid = rset[0][0]
+            out = rest_publish(context, ':bookmark:`%s`' % eid)
+            self.assertEqual(out, u'<p><h1>CWUser_plural</h1></p>
\n') def test_rqltable_nocontent(self): - context = self.context() - out = rest_publish(context, """.. rql-table::""") - self.assertIn("System Message: ERROR", out) - self.assertIn("Content block expected for the "rql-table" " - "directive; none found" , out) + with self.admin_access.web_request() as req: + context = self.context(req) + out = rest_publish(context, """.. rql-table::""") + self.assertIn("System Message: ERROR", out) + self.assertIn("Content block expected for the "rql-table" " + "directive; none found" , out) def test_rqltable_norset(self): - context = self.context() - rql = "Any X WHERE X is CWUser, X firstname 'franky'" - out = rest_publish( - context, """\ + with self.admin_access.web_request() as req: + context = self.context(req) + rql = "Any X WHERE X is CWUser, X firstname 'franky'" + out = rest_publish( + context, """\ .. rql-table:: - %(rql)s""" % {'rql': rql}) - self.assertIn("System Message: WARNING", out) - self.assertIn("empty result set", out) + %(rql)s""" % {'rql': rql}) + self.assertIn("System Message: WARNING", out) + self.assertIn("empty result set", out) def test_rqltable_nooptions(self): - rql = """Any S,F,L WHERE X is CWUser, X surname S, - X firstname F, X login L""" - out = rest_publish( - self.context(), """\ + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + out = rest_publish( + self.context(req), """\ .. rql-table:: %(rql)s - """ % {'rql': rql}) - req = self.request() - view = self.vreg['views'].select('table', req, rset=req.execute(rql)) - self.assertEqual(view.render(w=None)[49:], out[49:]) + """ % {'rql': rql}) + view = self.vreg['views'].select('table', req, rset=req.execute(rql)) + self.assertEqual(view.render(w=None)[49:], out[49:]) def test_rqltable_vid(self): - rql = """Any S,F,L WHERE X is CWUser, X surname S, - X firstname F, X login L""" - vid = 'mytable' - out = rest_publish( - self.context(), """\ + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + vid = 'mytable' + out = rest_publish( + self.context(req), """\ .. rql-table:: :vid: %(vid)s %(rql)s - """ % {'rql': rql, 'vid': vid}) - req = self.request() - view = self.vreg['views'].select(vid, req, rset=req.execute(rql)) - self.assertEqual(view.render(w=None)[49:], out[49:]) - self.assertIn(vid, out[:49]) + """ % {'rql': rql, 'vid': vid}) + view = self.vreg['views'].select(vid, req, rset=req.execute(rql)) + self.assertEqual(view.render(w=None)[49:], out[49:]) + self.assertIn(vid, out[:49]) def test_rqltable_badvid(self): - rql = """Any S,F,L WHERE X is CWUser, X surname S, - X firstname F, X login L""" - vid = 'mytabel' - out = rest_publish( - self.context(), """\ + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + vid = 'mytabel' + out = rest_publish( + self.context(req), """\ .. rql-table:: :vid: %(vid)s %(rql)s - """ % {'rql': rql, 'vid': vid}) - self.assertIn("fail to select '%s' view" % vid, out) + """ % {'rql': rql, 'vid': vid}) + self.assertIn("fail to select '%s' view" % vid, out) def test_rqltable_headers(self): - rql = """Any S,F,L WHERE X is CWUser, X surname S, - X firstname F, X login L""" - headers = ["nom", "prenom", "identifiant"] - out = rest_publish( - self.context(), """\ -.. 
rql-table:: - :headers: %(headers)s - - %(rql)s - """ % {'rql': rql, 'headers': ', '.join(headers)}) - req = self.request() - view = self.vreg['views'].select('table', req, rset=req.execute(rql)) - view.headers = headers - self.assertEqual(view.render(w=None)[49:], out[49:]) - - def test_rqltable_headers_missing(self): - rql = """Any S,F,L WHERE X is CWUser, X surname S, - X firstname F, X login L""" - headers = ["nom", "", "identifiant"] - out = rest_publish( - self.context(), """\ + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + headers = ["nom", "prenom", "identifiant"] + out = rest_publish( + self.context(req), """\ .. rql-table:: :headers: %(headers)s %(rql)s - """ % {'rql': rql, 'headers': ', '.join(headers)}) - req = self.request() - view = self.vreg['views'].select('table', req, rset=req.execute(rql)) - view.headers = [headers[0], None, headers[2]] - self.assertEqual(view.render(w=None)[49:], out[49:]) + """ % {'rql': rql, 'headers': ', '.join(headers)}) + view = self.vreg['views'].select('table', req, rset=req.execute(rql)) + view.headers = headers + self.assertEqual(view.render(w=None)[49:], out[49:]) - def test_rqltable_headers_missing_edges(self): - rql = """Any S,F,L WHERE X is CWUser, X surname S, - X firstname F, X login L""" - headers = [" ", "prenom", ""] - out = rest_publish( - self.context(), """\ + def test_rqltable_headers_missing(self): + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + headers = ["nom", "", "identifiant"] + out = rest_publish( + self.context(req), """\ .. rql-table:: :headers: %(headers)s %(rql)s - """ % {'rql': rql, 'headers': ', '.join(headers)}) - req = self.request() - view = self.vreg['views'].select('table', req, rset=req.execute(rql)) - view.headers = [None, headers[1], None] - self.assertEqual(view.render(w=None)[49:], out[49:]) + """ % {'rql': rql, 'headers': ', '.join(headers)}) + view = self.vreg['views'].select('table', req, rset=req.execute(rql)) + view.headers = [headers[0], None, headers[2]] + self.assertEqual(view.render(w=None)[49:], out[49:]) + + def test_rqltable_headers_missing_edges(self): + with self.admin_access.web_request() as req: + rql = "Any S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + headers = [" ", "prenom", ""] + out = rest_publish( + self.context(req), """\ +.. rql-table:: + :headers: %(headers)s + + %(rql)s + """ % {'rql': rql, 'headers': ', '.join(headers)}) + view = self.vreg['views'].select('table', req, rset=req.execute(rql)) + view.headers = [None, headers[1], None] + self.assertEqual(view.render(w=None)[49:], out[49:]) def test_rqltable_colvids(self): - rql = """Any X,S,F,L WHERE X is CWUser, X surname S, - X firstname F, X login L""" - colvids = {0: "oneline"} - out = rest_publish( - self.context(), """\ + with self.admin_access.web_request() as req: + rql = "Any X,S,F,L WHERE X is CWUser, X surname S, X firstname F, X login L" + colvids = {0: "oneline"} + out = rest_publish( + self.context(req), """\ .. 
rql-table:: :colvids: %(colvids)s %(rql)s - """ % {'rql': rql, - 'colvids': ', '.join(["%d=%s" % (k, v) - for k, v in colvids.iteritems()]) - }) - req = self.request() - view = self.vreg['views'].select('table', req, rset=req.execute(rql)) - view.cellvids = colvids - self.assertEqual(view.render(w=None)[49:], out[49:]) + """ % {'rql': rql, + 'colvids': ', '.join(["%d=%s" % (k, v) + for k, v in colvids.iteritems()]) + }) + view = self.vreg['views'].select('table', req, rset=req.execute(rql)) + view.cellvids = colvids + self.assertEqual(view.render(w=None)[49:], out[49:]) if __name__ == '__main__': diff -r 84738d495ffd -r 793377697c81 hooks/__init__.py --- a/hooks/__init__.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/__init__.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -34,18 +34,11 @@ lifetime = timedelta(days=self.repo.config['keep-transaction-lifetime']) def cleanup_old_transactions(repo=self.repo, lifetime=lifetime): mindate = datetime.now() - lifetime - session = repo.internal_session() - try: - session.system_sql( + with repo.internal_cnx() as cnx: + cnx.system_sql( 'DELETE FROM transactions WHERE tx_time < %(time)s', {'time': mindate}) - # cleanup deleted entities - session.system_sql( - 'DELETE FROM deleted_entities WHERE dtime < %(time)s', - {'time': mindate}) - session.commit() - finally: - session.close() + cnx.commit() if self.repo.config['undo-enabled']: self.repo.looping_task(60*60*24, cleanup_old_transactions, self.repo) @@ -57,22 +50,18 @@ def __call__(self): def update_feeds(repo): - # don't iter on repo.sources which doesn't include copy based - # sources (the one we're looking for) - # take a list to avoid iterating on a dictionary which size may + # take a list to avoid iterating on a dictionary whose size may # change - for source in list(repo.sources_by_eid.values()): - if (not source.copy_based_source + for uri, source in list(repo.sources_by_uri.iteritems()): + if (uri == 'system' or not repo.config.source_enabled(source) or not source.config['synchronize']): continue - session = repo.internal_session(safe=True) - try: - source.pull_data(session) - except Exception as exc: - session.exception('while trying to update feed %s', source) - finally: - session.close() + with repo.internal_cnx() as cnx: + try: + source.pull_data(cnx) + except Exception as exc: + cnx.exception('while trying to update feed %s', source) self.repo.looping_task(60, update_feeds, self.repo) @@ -83,16 +72,13 @@ def __call__(self): def expire_dataimports(repo=self.repo): - for source in repo.sources_by_eid.itervalues(): - if (not source.copy_based_source + for uri, source in repo.sources_by_uri.iteritems(): + if (uri == 'system' or not repo.config.source_enabled(source)): continue - session = repo.internal_session() - try: + with repo.internal_cnx() as cnx: mindate = datetime.now() - timedelta(seconds=source.config['logs-lifetime']) - session.execute('DELETE CWDataImport X WHERE X start_timestamp < %(time)s', + cnx.execute('DELETE CWDataImport X WHERE X start_timestamp < %(time)s', {'time': mindate}) - session.commit() - finally: - session.close() + cnx.commit() self.repo.looping_task(60*60*24, expire_dataimports, self.repo) diff -r 84738d495ffd -r 793377697c81 hooks/bookmark.py --- a/hooks/bookmark.py Wed Sep 24 17:35:59 2014 +0200 +++ 
b/hooks/bookmark.py Wed Sep 24 18:04:30 2014 +0200 @@ -25,7 +25,7 @@ class AutoDeleteBookmarkOp(hook.Operation): bookmark = None # make pylint happy def precommit_event(self): - if not self.session.deleted_in_transaction(self.bookmark.eid): + if not self.cnx.deleted_in_transaction(self.bookmark.eid): if not self.bookmark.bookmarked_by: self.bookmark.cw_delete() diff -r 84738d495ffd -r 793377697c81 hooks/email.py --- a/hooks/email.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/email.py Wed Sep 24 18:04:30 2014 +0200 @@ -36,7 +36,7 @@ def precommit_event(self): if self.condition(): - self.session.execute( + self.cnx.execute( 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % self.rtype, {'x': self.entity.eid, 'y': self.email.eid}) diff -r 84738d495ffd -r 793377697c81 hooks/integrity.py --- a/hooks/integrity.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/integrity.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -24,10 +24,10 @@ from threading import Lock -from cubicweb import validation_error +from cubicweb import validation_error, neg_role from cubicweb.schema import (META_RTYPES, WORKFLOW_RTYPES, RQLConstraint, RQLUniqueConstraint) -from cubicweb.predicates import is_instance +from cubicweb.predicates import is_instance, composite_etype from cubicweb.uilib import soup2xhtml from cubicweb.server import hook @@ -40,30 +40,30 @@ _UNIQUE_CONSTRAINTS_HOLDER = None -def _acquire_unique_cstr_lock(session): - """acquire the _UNIQUE_CONSTRAINTS_LOCK for the session. +def _acquire_unique_cstr_lock(cnx): + """acquire the _UNIQUE_CONSTRAINTS_LOCK for the cnx. 
This lock used to avoid potential integrity pb when checking RQLUniqueConstraint in two different transactions, as explained in http://intranet.logilab.fr/jpl/ticket/36564 """ - if 'uniquecstrholder' in session.transaction_data: + if 'uniquecstrholder' in cnx.transaction_data: return _UNIQUE_CONSTRAINTS_LOCK.acquire() - session.transaction_data['uniquecstrholder'] = True + cnx.transaction_data['uniquecstrholder'] = True # register operation responsible to release the lock on commit/rollback - _ReleaseUniqueConstraintsOperation(session) + _ReleaseUniqueConstraintsOperation(cnx) -def _release_unique_cstr_lock(session): - if 'uniquecstrholder' in session.transaction_data: - del session.transaction_data['uniquecstrholder'] +def _release_unique_cstr_lock(cnx): + if 'uniquecstrholder' in cnx.transaction_data: + del cnx.transaction_data['uniquecstrholder'] _UNIQUE_CONSTRAINTS_LOCK.release() class _ReleaseUniqueConstraintsOperation(hook.Operation): def postcommit_event(self): - _release_unique_cstr_lock(self.session) + _release_unique_cstr_lock(self.cnx) def rollback_event(self): - _release_unique_cstr_lock(self.session) + _release_unique_cstr_lock(self.cnx) class _CheckRequiredRelationOperation(hook.DataOperationMixIn, @@ -75,17 +75,17 @@ role = key = base_rql = None def precommit_event(self): - session = self.session - pendingeids = session.transaction_data.get('pendingeids', ()) - pendingrtypes = session.transaction_data.get('pendingrtypes', ()) + cnx = self.cnx + pendingeids = cnx.transaction_data.get('pendingeids', ()) + pendingrtypes = cnx.transaction_data.get('pendingrtypes', ()) for eid, rtype in self.get_data(): # recheck pending eids / relation types if eid in pendingeids: continue if rtype in pendingrtypes: continue - if not session.execute(self.base_rql % rtype, {'x': eid}): - etype = session.describe(eid)[0] + if not cnx.execute(self.base_rql % rtype, {'x': eid}): + etype = cnx.entity_metas(eid)['type'] msg = _('at least one relation %(rtype)s is required on ' '%(etype)s (%(eid)s)') raise validation_error(eid, {(rtype, self.role): msg}, @@ -142,16 +142,16 @@ rtype = self.rtype if rtype in DONT_CHECK_RTYPES_ON_DEL: return - session = self._cw + cnx = self._cw eidfrom, eidto = self.eidfrom, self.eidto - rdef = session.rtype_eids_rdef(rtype, eidfrom, eidto) - if (rdef.subject, rtype, rdef.object) in session.transaction_data.get('pendingrdefs', ()): + rdef = cnx.rtype_eids_rdef(rtype, eidfrom, eidto) + if (rdef.subject, rtype, rdef.object) in cnx.transaction_data.get('pendingrdefs', ()): return card = rdef.cardinality - if card[0] in '1+' and not session.deleted_in_transaction(eidfrom): - _CheckSRelationOp.get_instance(session).add_data((eidfrom, rtype)) - if card[1] in '1+' and not session.deleted_in_transaction(eidto): - _CheckORelationOp.get_instance(session).add_data((eidto, rtype)) + if card[0] in '1+' and not cnx.deleted_in_transaction(eidfrom): + _CheckSRelationOp.get_instance(cnx).add_data((eidfrom, rtype)) + if card[1] in '1+' and not cnx.deleted_in_transaction(eidto): + _CheckORelationOp.get_instance(cnx).add_data((eidto, rtype)) class CheckCardinalityHookAfterAddEntity(IntegrityHook): @@ -179,14 +179,14 @@ """ check a new relation satisfy its constraints """ containercls = list def precommit_event(self): - session = self.session + cnx = self.cnx for values in self.get_data(): eidfrom, rtype, eidto, constraints = values # first check related entities have not been deleted in the same # transaction - if session.deleted_in_transaction(eidfrom): + if 
cnx.deleted_in_transaction(eidfrom): continue - if session.deleted_in_transaction(eidto): + if cnx.deleted_in_transaction(eidto): continue for constraint in constraints: # XXX @@ -194,9 +194,9 @@ # * use a constraint id to use per constraint lock and avoid # unnecessary commit serialization ? if isinstance(constraint, RQLUniqueConstraint): - _acquire_unique_cstr_lock(session) + _acquire_unique_cstr_lock(cnx) try: - constraint.repo_check(session, eidfrom, rtype, eidto) + constraint.repo_check(cnx, eidfrom, rtype, eidto) except NotImplementedError: self.critical('can\'t check constraint %s, not supported', constraint) @@ -309,69 +309,27 @@ self.entity.cw_edited['login'] = login.strip() -# 'active' integrity hooks: you usually don't want to deactivate them, they are -# not really integrity check, they maintain consistency on changes - -class _DelayedDeleteOp(hook.DataOperationMixIn, hook.Operation): - """delete the object of composite relation except if the relation has - actually been redirected to another composite - """ - base_rql = None - - def precommit_event(self): - session = self.session - pendingeids = session.transaction_data.get('pendingeids', ()) - eids_by_etype_rtype = {} - for eid, rtype in self.get_data(): - # don't do anything if the entity is being deleted - if eid not in pendingeids: - etype = session.describe(eid)[0] - key = (etype, rtype) - if key not in eids_by_etype_rtype: - eids_by_etype_rtype[key] = [str(eid)] - else: - eids_by_etype_rtype[key].append(str(eid)) - for (etype, rtype), eids in eids_by_etype_rtype.iteritems(): - # quite unexpectedly, not deleting too many entities at a time in - # this operation benefits to the exec speed (possibly on the RQL - # parsing side) - start = 0 - incr = 500 - while start < len(eids): - session.execute(self.base_rql % (etype, ','.join(eids[start:start+incr]), rtype)) - start += incr - -class _DelayedDeleteSEntityOp(_DelayedDeleteOp): - """delete orphan subject entity of a composite relation""" - base_rql = 'DELETE %s X WHERE X eid IN (%s), NOT X %s Y' - -class _DelayedDeleteOEntityOp(_DelayedDeleteOp): - """check required object relation""" - base_rql = 'DELETE %s X WHERE X eid IN (%s), NOT Y %s X' - - class DeleteCompositeOrphanHook(hook.Hook): - """delete the composed of a composite relation when this relation is deleted + """Delete the composed of a composite relation when the composite is + deleted (this is similar to the cascading ON DELETE CASCADE + semantics of sql). 
""" __regid__ = 'deletecomposite' - events = ('before_delete_relation',) + __select__ = hook.Hook.__select__ & composite_etype() + events = ('before_delete_entity',) category = 'activeintegrity' + # give the application's before_delete_entity hooks a chance to run before we cascade + order = 99 def __call__(self): - # if the relation is being delete, don't delete composite's components - # automatically - session = self._cw - rtype = self.rtype - rdef = session.rtype_eids_rdef(rtype, self.eidfrom, self.eidto) - if (rdef.subject, rtype, rdef.object) in session.transaction_data.get('pendingrdefs', ()): - return - composite = rdef.composite - if composite == 'subject': - _DelayedDeleteOEntityOp.get_instance(self._cw).add_data( - (self.eidto, rtype)) - elif composite == 'object': - _DelayedDeleteSEntityOp.get_instance(self._cw).add_data( - (self.eidfrom, rtype)) + eid = self.entity.eid + for rdef, role in self.entity.e_schema.composite_rdef_roles: + rtype = rdef.rtype.type + target = getattr(rdef, neg_role(role)) + expr = ('C %s X' % rtype) if role == 'subject' else ('X %s C' % rtype) + self._cw.execute('DELETE %s X WHERE C eid %%(c)s, %s' % (target, expr), + {'c': eid}) + def registration_callback(vreg): vreg.register_all(globals().values(), __name__) diff -r 84738d495ffd -r 793377697c81 hooks/metadata.py --- a/hooks/metadata.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/metadata.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -46,7 +46,7 @@ edited['creation_date'] = timestamp if not edited.get('modification_date'): edited['modification_date'] = timestamp - if not self._cw.get_shared_data('do-not-insert-cwuri'): + if not self._cw.transaction_data.get('do-not-insert-cwuri'): cwuri = u'%s%s' % (self._cw.base_url(), self.entity.eid) edited.setdefault('cwuri', cwuri) @@ -69,14 +69,14 @@ class SetCreatorOp(hook.DataOperationMixIn, hook.Operation): def precommit_event(self): - session = self.session - relations = [(eid, session.user.eid) for eid in self.get_data() + cnx = self.cnx + relations = [(eid, cnx.user.eid) for eid in self.get_data() # don't consider entities that have been created and deleted in # the same transaction, nor ones where created_by has been # explicitly set - if not session.deleted_in_transaction(eid) and \ - not session.entity_from_eid(eid).created_by] - session.add_relations([('created_by', relations)]) + if not cnx.deleted_in_transaction(eid) and \ + not cnx.entity_from_eid(eid).created_by] + cnx.add_relations([('created_by', relations)]) class SetOwnershipHook(MetaDataHook): @@ -93,7 +93,7 @@ class SyncOwnersOp(hook.DataOperationMixIn, hook.Operation): def precommit_event(self): for compositeeid, composedeid in self.get_data(): - self.session.execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,' + self.cnx.execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,' 'NOT EXISTS(X owned_by U, X eid %(x)s)', {'c': compositeeid, 'x': composedeid}) @@ -136,14 +136,14 @@ def __call__(self): rtype = self.rtype - session = self._cw - ftcontainer = session.vreg.schema.rschema(rtype).fulltext_container + cnx = self._cw + ftcontainer = cnx.vreg.schema.rschema(rtype).fulltext_container if ftcontainer == 'subject': - session.repo.system_source.index_entity( - session, session.entity_from_eid(self.eidfrom)) + 
cnx.repo.system_source.index_entity( + cnx, cnx.entity_from_eid(self.eidfrom)) elif ftcontainer == 'object': - session.repo.system_source.index_entity( - session, session.entity_from_eid(self.eidto)) + cnx.repo.system_source.index_entity( + cnx, cnx.entity_from_eid(self.eidto)) @@ -154,16 +154,13 @@ def postcommit_event(self): self.oldsource.reset_caches() - repo = self.session.repo + repo = self.cnx.repo entity = self.entity extid = entity.cw_metainformation()['extid'] repo._type_source_cache[entity.eid] = ( - entity.cw_etype, self.newsource.uri, None, self.newsource.uri) - if self.oldsource.copy_based_source: - uri = 'system' - else: - uri = self.oldsource.uri - repo._extid_cache[(extid, uri)] = -entity.eid + entity.cw_etype, None, self.newsource.uri) + repo._extid_cache[extid] = -entity.eid + class ChangeEntitySourceDeleteHook(MetaDataHook): """support for moving an entity from an external source by watching 'Any @@ -197,16 +194,6 @@ syssource = newsource.repo_source oldsource = self._cw.entity_from_eid(schange[self.eidfrom]) entity = self._cw.entity_from_eid(self.eidfrom) - # copy entity if necessary - if not oldsource.repo_source.copy_based_source: - entity.complete(skip_bytes=False, skip_pwd=False) - if not entity.creation_date: - entity.cw_attr_cache['creation_date'] = datetime.now() - if not entity.modification_date: - entity.cw_attr_cache['modification_date'] = datetime.now() - entity.cw_attr_cache['cwuri'] = u'%s%s' % (self._cw.base_url(), entity.eid) - entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache) - syssource.add_entity(self._cw, entity) # we don't want the moved entity to be reimported later. To # distinguish this state, the trick is to change the associated # record in the 'entities' system table with eid=-eid while leaving @@ -217,8 +204,7 @@ self._cw.system_sql('UPDATE entities SET eid=-eid WHERE eid=%(eid)s', {'eid': self.eidfrom}) attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': None, - 'source': 'system', 'asource': 'system', - 'mtime': datetime.now()} + 'asource': 'system'} self._cw.system_sql(syssource.sqlgen.insert('entities', attrs), attrs) # register an operation to update repository/sources caches ChangeEntitySourceUpdateCaches(self._cw, entity=entity, diff -r 84738d495ffd -r 793377697c81 hooks/notification.py --- a/hooks/notification.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/notification.py Wed Sep 24 18:04:30 2014 +0200 @@ -29,11 +29,11 @@ @deprecated('[3.17] use notify_on_commit instead') -def RenderAndSendNotificationView(session, view, viewargs=None): - notify_on_commit(session, view, viewargs) +def RenderAndSendNotificationView(cnx, view, viewargs=None): + notify_on_commit(cnx, view, viewargs) -def notify_on_commit(session, view, viewargs=None): +def notify_on_commit(cnx, view, viewargs=None): """register a notification view (see :class:`~cubicweb.sobjects.notification.NotificationView`) to be sent at post-commit time, ie only if the transaction has succeeded. 
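
For context on the notify_on_commit() API whose argument is renamed here from session to cnx vocabulary: a hedged usage sketch of registering a notification from a hook, so the view is only rendered and sent if the transaction commits. The entity type, hook regid, and class name are invented for the example, not taken from this patch; the select/eid_rset/notify_on_commit calls mirror the ones visible in the hunks around this point:

    from cubicweb.predicates import is_instance
    from cubicweb.server import hook
    from cubicweb.hooks.notification import notify_on_commit

    class TicketUpdatedNotifHook(hook.Hook):  # hypothetical
        __regid__ = 'myapp.ticket-updated-notif'
        __select__ = hook.Hook.__select__ & is_instance('Ticket')
        events = ('after_update_entity',)

        def __call__(self):
            view = self._cw.vreg['views'].select(
                'notif_entity_updated', self._cw,
                rset=self._cw.eid_rset(self.entity.eid), row=0)
            # rendered and emailed at post-commit time, i.e. only if the
            # transaction actually succeeds
            notify_on_commit(self._cw, view)
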
@@ -43,7 +43,7 @@ """ if viewargs is None: viewargs = {} - notif_op = _RenderAndSendNotificationOp.get_instance(session) + notif_op = _RenderAndSendNotificationOp.get_instance(cnx) notif_op.add_data((view, viewargs)) @@ -58,7 +58,7 @@ containercls = list def postcommit_event(self): - deleted = self.session.deleted_in_transaction + deleted = self.cnx.deleted_in_transaction for view, viewargs in self.get_data(): if view.cw_rset is not None: if not view.cw_rset: @@ -153,13 +153,13 @@ def precommit_event(self): # precommit event that creates postcommit operation - session = self.session - for eid in session.transaction_data['changes']: - view = session.vreg['views'].select('notif_entity_updated', session, - rset=session.eid_rset(eid), - row=0) - notify_on_commit(self.session, view, - viewargs={'changes': session.transaction_data['changes'][eid]}) + cnx = self.cnx + for eid in cnx.transaction_data['changes']: + view = cnx.vreg['views'].select('notif_entity_updated', cnx, + rset=cnx.eid_rset(eid), + row=0) + notify_on_commit(self.cnx, view, + viewargs={'changes': cnx.transaction_data['changes'][eid]}) class EntityUpdateHook(NotificationHook): @@ -170,15 +170,15 @@ skip_attrs = set() def __call__(self): - session = self._cw - if session.added_in_transaction(self.entity.eid): + cnx = self._cw + if cnx.added_in_transaction(self.entity.eid): return # entity is being created # then compute changes attrs = [k for k in self.entity.cw_edited if not k in self.skip_attrs] if not attrs: return - changes = session.transaction_data.setdefault('changes', {}) + changes = cnx.transaction_data.setdefault('changes', {}) thisentitychanges = changes.setdefault(self.entity.eid, set()) rqlsel, rqlrestr = [], ['X eid %(x)s'] for i, attr in enumerate(attrs): @@ -186,14 +186,14 @@ rqlsel.append(var) rqlrestr.append('X %s %s' % (attr, var)) rql = 'Any %s WHERE %s' % (','.join(rqlsel), ','.join(rqlrestr)) - rset = session.execute(rql, {'x': self.entity.eid}) + rset = cnx.execute(rql, {'x': self.entity.eid}) for i, attr in enumerate(attrs): oldvalue = rset[0][i] newvalue = self.entity.cw_edited[attr] if oldvalue != newvalue: thisentitychanges.add((attr, oldvalue, newvalue)) if thisentitychanges: - EntityUpdatedNotificationOp(session) + EntityUpdatedNotificationOp(cnx) # supervising ################################################################## diff -r 84738d495ffd -r 793377697c81 hooks/security.py --- a/hooks/security.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/security.py Wed Sep 24 18:04:30 2014 +0200 @@ -16,7 +16,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . """Security hooks: check permissions to add/delete/update entities according to -the user connected to a session +the connected user """ __docformat__ = "restructuredtext en" @@ -31,7 +31,7 @@ -def check_entity_attributes(session, entity, action, editedattrs=None): +def check_entity_attributes(cnx, entity, action, editedattrs=None): eid = entity.eid eschema = entity.e_schema if action == 'delete': @@ -67,24 +67,24 @@ # That means an immutable attribute; as an optimization, avoid # going through check_perm. 
raise Unauthorized(action, str(rdef)) - rdef.check_perm(session, action, eid=eid) + rdef.check_perm(cnx, action, eid=eid) class CheckEntityPermissionOp(hook.DataOperationMixIn, hook.LateOperation): def precommit_event(self): - session = self.session + cnx = self.cnx for eid, action, edited in self.get_data(): - entity = session.entity_from_eid(eid) - check_entity_attributes(session, entity, action, edited) + entity = cnx.entity_from_eid(eid) + check_entity_attributes(cnx, entity, action, edited) class CheckRelationPermissionOp(hook.DataOperationMixIn, hook.LateOperation): def precommit_event(self): - session = self.session + cnx = self.cnx for action, rschema, eidfrom, eidto in self.get_data(): - rdef = rschema.rdef(session.describe(eidfrom)[0], - session.describe(eidto)[0]) - rdef.check_perm(session, action, fromeid=eidfrom, toeid=eidto) + rdef = rschema.rdef(cnx.entity_metas(eidfrom)['type'], + cnx.entity_metas(eidto)['type']) + rdef.check_perm(cnx, action, fromeid=eidfrom, toeid=eidto) @objectify_predicate @@ -138,8 +138,8 @@ if (self.eidfrom, self.rtype, self.eidto) in nocheck: return rschema = self._cw.repo.schema[self.rtype] - rdef = rschema.rdef(self._cw.describe(self.eidfrom)[0], - self._cw.describe(self.eidto)[0]) + rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'], + self._cw.entity_metas(self.eidto)['type']) rdef.check_perm(self._cw, 'add', fromeid=self.eidfrom, toeid=self.eidto) @@ -157,8 +157,8 @@ CheckRelationPermissionOp.get_instance(self._cw).add_data( ('add', rschema, self.eidfrom, self.eidto) ) else: - rdef = rschema.rdef(self._cw.describe(self.eidfrom)[0], - self._cw.describe(self.eidto)[0]) + rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'], + self._cw.entity_metas(self.eidto)['type']) rdef.check_perm(self._cw, 'add', fromeid=self.eidfrom, toeid=self.eidto) @@ -171,7 +171,7 @@ if (self.eidfrom, self.rtype, self.eidto) in nocheck: return rschema = self._cw.repo.schema[self.rtype] - rdef = rschema.rdef(self._cw.describe(self.eidfrom)[0], - self._cw.describe(self.eidto)[0]) + rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'], + self._cw.entity_metas(self.eidto)['type']) rdef.check_perm(self._cw, 'delete', fromeid=self.eidfrom, toeid=self.eidto) diff -r 84738d495ffd -r 793377697c81 hooks/syncschema.py --- a/hooks/syncschema.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/syncschema.py Wed Sep 24 18:04:30 2014 +0200 @@ -44,10 +44,10 @@ ('CWUser', 'CWGroup','login', 'upassword', 'name', 'in_group')) -def get_constraints(session, entity): +def get_constraints(cnx, entity): constraints = [] - for cstreid in session.transaction_data.get(entity.eid, ()): - cstrent = session.entity_from_eid(cstreid) + for cstreid in cnx.transaction_data.get(entity.eid, ()): + cstrent = cnx.entity_from_eid(cstreid) cstr = CONSTRAINTS[cstrent.type].deserialize(cstrent.value) cstr.eid = cstreid constraints.append(cstr) @@ -60,32 +60,32 @@ cw.transaction_data['groupmap'] = gmap = ss.group_mapping(cw) return gmap -def add_inline_relation_column(session, etype, rtype): +def add_inline_relation_column(cnx, etype, rtype): """add necessary column and index for an inlined relation""" attrkey = '%s.%s' % (etype, rtype) - createdattrs = session.transaction_data.setdefault('createdattrs', set()) + createdattrs = cnx.transaction_data.setdefault('createdattrs', set()) if attrkey in createdattrs: return createdattrs.add(attrkey) table = SQL_PREFIX + etype column = SQL_PREFIX + rtype try: - session.system_sql(str('ALTER TABLE %s ADD %s integer' + cnx.system_sql(str('ALTER 
TABLE %s ADD %s integer' % (table, column)), rollback_on_failure=False) - session.info('added column %s to table %s', column, table) + cnx.info('added column %s to table %s', column, table) except Exception: # silent exception here, if this error has not been raised because the # column already exists, index creation will fail anyway - session.exception('error while adding column %s to table %s', + cnx.exception('error while adding column %s to table %s', column, table) # create index before alter table which may expectedly fail during test # (sqlite) while index creation should never fail (test for index existence # is done by the dbhelper) - session.cnxset.source('system').create_index(session, table, column) - session.info('added index on %s(%s)', table, column) + cnx.repo.system_source.create_index(cnx, table, column) + cnx.info('added index on %s(%s)', table, column) -def insert_rdef_on_subclasses(session, eschema, rschema, rdefdef, props): +def insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef, props): # XXX 'infered': True/False, not clear actually props.update({'constraints': rdefdef.constraints, 'description': rdefdef.description, @@ -94,19 +94,19 @@ 'order': rdefdef.order, 'infered': False, 'eid': None }) - cstrtypemap = ss.cstrtype_mapping(session) - groupmap = group_mapping(session) + cstrtypemap = ss.cstrtype_mapping(cnx) + groupmap = group_mapping(cnx) object = rschema.schema.eschema(rdefdef.object) for specialization in eschema.specialized_by(False): if (specialization, rdefdef.object) in rschema.rdefs: continue sperdef = RelationDefinitionSchema(specialization, rschema, object, None, values=props) - ss.execschemarql(session.execute, sperdef, + ss.execschemarql(cnx.execute, sperdef, ss.rdef2rql(sperdef, cstrtypemap, groupmap)) -def check_valid_changes(session, entity, ro_attrs=('name', 'final')): +def check_valid_changes(cnx, entity, ro_attrs=('name', 'final')): errors = {} # don't use getattr(entity, attr), we would get the modified value if any for attr in entity.cw_edited: @@ -137,22 +137,22 @@ """actually remove a table from the instance's schema""" table = None # make pylint happy def precommit_event(self): - dropped = self.session.transaction_data.setdefault('droppedtables', + dropped = self.cnx.transaction_data.setdefault('droppedtables', set()) if self.table in dropped: return # already processed dropped.add(self.table) - self.session.system_sql('DROP TABLE %s' % self.table) + self.cnx.system_sql('DROP TABLE %s' % self.table) self.info('dropped table %s', self.table) # XXX revertprecommit_event class DropRelationTable(DropTable): - def __init__(self, session, rtype): + def __init__(self, cnx, rtype): super(DropRelationTable, self).__init__( - session, table='%s_relation' % rtype) - session.transaction_data.setdefault('pendingrtypes', set()).add(rtype) + cnx, table='%s_relation' % rtype) + cnx.transaction_data.setdefault('pendingrtypes', set()).add(rtype) class DropColumn(hook.Operation): @@ -161,12 +161,12 @@ """ table = column = None # make pylint happy def precommit_event(self): - session, table, column = self.session, self.table, self.column - source = session.repo.system_source + cnx, table, column = self.cnx, self.table, self.column + source = cnx.repo.system_source # drop index if any - source.drop_index(session, table, column) + source.drop_index(cnx, table, column) if source.dbhelper.alter_column_support: - session.system_sql('ALTER TABLE %s DROP COLUMN %s' + cnx.system_sql('ALTER TABLE %s DROP COLUMN %s' % (table, column),
rollback_on_failure=False) self.info('dropped column %s from table %s', column, table) else: @@ -187,16 +187,16 @@ schema changes. """ - def __init__(self, session): - hook.SingleLastOperation.__init__(self, session) + def __init__(self, cnx): + hook.SingleLastOperation.__init__(self, cnx) def precommit_event(self): - for eschema in self.session.repo.schema.entities(): + for eschema in self.cnx.repo.schema.entities(): if not eschema.final: clear_cache(eschema, 'ordered_relations') def postcommit_event(self): - repo = self.session.repo + repo = self.cnx.repo # commit event should not raise error, while set_schema has chances to # do so because it triggers full vreg reloading try: @@ -204,7 +204,7 @@ # trigger vreg reload repo.set_schema(repo.schema) # CWUser class might have changed, update current session users - cwuser_cls = self.session.vreg['etypes'].etype_class('CWUser') + cwuser_cls = self.cnx.vreg['etypes'].etype_class('CWUser') for session in repo._sessions.itervalues(): session.user.__class__ = cwuser_cls except Exception: @@ -216,10 +216,10 @@ class MemSchemaOperation(hook.Operation): """base class for schema operations""" - def __init__(self, session, **kwargs): - hook.Operation.__init__(self, session, **kwargs) + def __init__(self, cnx, **kwargs): + hook.Operation.__init__(self, cnx, **kwargs) # every schema operation is triggering a schema update - MemSchemaNotifyChanges(session) + MemSchemaNotifyChanges(cnx) # operations for high-level source database alteration ######################## @@ -235,21 +235,21 @@ entity = None # make pylint happy def precommit_event(self): - session = self.session + cnx = self.cnx entity = self.entity - schema = session.vreg.schema + schema = cnx.vreg.schema etype = ybo.EntityType(eid=entity.eid, name=entity.name, description=entity.description) eschema = schema.add_entity_type(etype) # create the necessary table - tablesql = y2sql.eschema2sql(session.cnxset.source('system').dbhelper, + tablesql = y2sql.eschema2sql(cnx.repo.system_source.dbhelper, eschema, prefix=SQL_PREFIX) for sql in tablesql.split(';'): if sql.strip(): - session.system_sql(sql) + cnx.system_sql(sql) # add meta relations - gmap = group_mapping(session) - cmap = ss.cstrtype_mapping(session) + gmap = group_mapping(cnx) + cmap = ss.cstrtype_mapping(cnx) for rtype in (META_RTYPES - VIRTUAL_RTYPES): try: rschema = schema[rtype] @@ -268,13 +268,13 @@ continue rdef.subject = _MockEntity(eid=entity.eid) mock = _MockEntity(eid=None) - ss.execschemarql(session.execute, mock, ss.rdef2rql(rdef, cmap, gmap)) + ss.execschemarql(cnx.execute, mock, ss.rdef2rql(rdef, cmap, gmap)) def revertprecommit_event(self): # revert changes on in memory schema - self.session.vreg.schema.del_entity_type(self.entity.name) + self.cnx.vreg.schema.del_entity_type(self.entity.name) # revert changes on database - self.session.system_sql('DROP TABLE %s%s' % (SQL_PREFIX, self.entity.name)) + self.cnx.system_sql('DROP TABLE %s%s' % (SQL_PREFIX, self.entity.name)) class CWETypeRenameOp(MemSchemaOperation): @@ -282,21 +282,19 @@ oldname = newname = None # make pylint happy def rename(self, oldname, newname): - self.session.vreg.schema.rename_entity_type(oldname, newname) + self.cnx.vreg.schema.rename_entity_type(oldname, newname) # we need sql to operate physical changes on the system database - sqlexec = self.session.system_sql - dbhelper= self.session.cnxset.source('system').dbhelper + sqlexec = self.cnx.system_sql + dbhelper = self.cnx.repo.system_source.dbhelper sql = dbhelper.sql_rename_table(SQL_PREFIX+oldname, 
SQL_PREFIX+newname) sqlexec(sql) self.info('renamed table %s to %s', oldname, newname) sqlexec('UPDATE entities SET type=%(newname)s WHERE type=%(oldname)s', {'newname': newname, 'oldname': oldname}) - for eid, (etype, uri, extid, auri) in self.session.repo._type_source_cache.items(): + for eid, (etype, extid, auri) in self.cnx.repo._type_source_cache.items(): if etype == oldname: - self.session.repo._type_source_cache[eid] = (newname, uri, extid, auri) - sqlexec('UPDATE deleted_entities SET type=%(newname)s WHERE type=%(oldname)s', - {'newname': newname, 'oldname': oldname}) + self.cnx.repo._type_source_cache[eid] = (newname, extid, auri) # XXX transaction records def precommit_event(self): @@ -315,9 +313,9 @@ rschema = self.rschema if rschema.final: return # watched changes to final relation type are unexpected - session = self.session + cnx = self.cnx if 'fulltext_container' in self.values: - op = UpdateFTIndexOp.get_instance(session) + op = UpdateFTIndexOp.get_instance(cnx) for subjtype, objtype in rschema.rdefs: op.add_data(subjtype) op.add_data(objtype) @@ -332,19 +330,19 @@ if inlined: self.entity.check_inlined_allowed() # inlined changed, make necessary physical changes! - sqlexec = self.session.system_sql + sqlexec = self.cnx.system_sql rtype = rschema.type eidcolumn = SQL_PREFIX + 'eid' if not inlined: # need to create the relation if it has not been already done by # another event of the same transaction - if not rschema.type in session.transaction_data.get('createdtables', ()): + if not rschema.type in cnx.transaction_data.get('createdtables', ()): tablesql = y2sql.rschema2sql(rschema) # create the necessary table for sql in tablesql.split(';'): if sql.strip(): sqlexec(sql) - session.transaction_data.setdefault('createdtables', []).append( + cnx.transaction_data.setdefault('createdtables', []).append( rschema.type) # copy existant data column = SQL_PREFIX + rtype @@ -353,14 +351,14 @@ sqlexec('INSERT INTO %s_relation SELECT %s, %s FROM %s WHERE NOT %s IS NULL' % (rtype, eidcolumn, column, table, column)) # drop existant columns - #if session.repo.system_source.dbhelper.alter_column_support: + #if cnx.repo.system_source.dbhelper.alter_column_support: for etype in rschema.subjects(): - DropColumn(session, table=SQL_PREFIX + str(etype), + DropColumn(cnx, table=SQL_PREFIX + str(etype), column=SQL_PREFIX + rtype) else: for etype in rschema.subjects(): try: - add_inline_relation_column(session, str(etype), rtype) + add_inline_relation_column(cnx, str(etype), rtype) except Exception as ex: # the column probably already exists. 
this occurs when the # entity's type has just been added or if the column has not @@ -382,7 +380,7 @@ cursor.executemany('UPDATE %s SET %s=%%(val)s WHERE %s=%%(x)s' % (table, column, eidcolumn), args) # drop existant table - DropRelationTable(session, rtype) + DropRelationTable(cnx, rtype) def revertprecommit_event(self): # revert changes on in memory schema @@ -407,10 +405,10 @@ rdefdef = self.rdefdef = ybo.RelationDefinition( str(fromentity.name), entity.rtype.name, str(entity.otype.name), description=entity.description, cardinality=entity.cardinality, - constraints=get_constraints(self.session, entity), + constraints=get_constraints(self.cnx, entity), order=entity.ordernum, eid=entity.eid, **kwargs) - self.session.vreg.schema.add_relation_def(rdefdef) - self.session.execute('SET X ordernum Y+1 ' + self.cnx.vreg.schema.add_relation_def(rdefdef) + self.cnx.execute('SET X ordernum Y+1 ' 'WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, ' 'X ordernum >= %(order)s, NOT X eid %(x)s', {'x': entity.eid, 'se': fromentity.eid, @@ -418,7 +416,7 @@ return rdefdef def precommit_event(self): - session = self.session + cnx = self.cnx entity = self.entity # entity.defaultval is a Binary or None, but we need a correctly typed # value @@ -432,7 +430,7 @@ # update the in-memory schema first rdefdef = self.init_rdef(**props) # then make necessary changes to the system source database - syssource = session.cnxset.source('system') + syssource = cnx.repo.system_source attrtype = y2sql.type_from_constraints( syssource.dbhelper, rdefdef.object, rdefdef.constraints) # XXX should be moved somehow into lgdb: sqlite doesn't support to @@ -448,7 +446,7 @@ table = SQL_PREFIX + rdefdef.subject column = SQL_PREFIX + rdefdef.name try: - session.system_sql(str('ALTER TABLE %s ADD %s %s' + cnx.system_sql(str('ALTER TABLE %s ADD %s %s' % (table, column, attrtype)), rollback_on_failure=False) self.info('added column %s to table %s', table, column) @@ -459,13 +457,13 @@ self.error('error while altering table %s: %s', table, ex) if extra_unique_index or entity.indexed: try: - syssource.create_index(session, table, column, + syssource.create_index(cnx, table, column, unique=extra_unique_index) except Exception as ex: self.error('error while creating index for %s.%s: %s', table, column, ex) # final relations are not infered, propagate - schema = session.vreg.schema + schema = cnx.vreg.schema try: eschema = schema.eschema(rdefdef.subject) except KeyError: @@ -475,18 +473,18 @@ # if relation type has been inserted in the same transaction, its final # attribute is still set to False, so we've to ensure it's False rschema.final = True - insert_rdef_on_subclasses(session, eschema, rschema, rdefdef, props) + insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef, props) # update existing entities with the default value of newly added attribute if default is not None: default = convert_default_value(self.rdefdef, default) - session.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column), + cnx.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column), {'default': default}) def revertprecommit_event(self): # revert changes on in memory schema if getattr(self, 'rdefdef', None) is None: return - self.session.vreg.schema.del_relation_def( + self.cnx.vreg.schema.del_relation_def( self.rdefdef.subject, self.rdefdef.name, self.rdefdef.object) # XXX revert changes on database @@ -505,12 +503,12 @@ entity = None # make pylint happy def precommit_event(self): - session = self.session + cnx = self.cnx entity = self.entity # update 
the in-memory schema first rdefdef = self.init_rdef(composite=entity.composite) # then make necessary changes to the system source database - schema = session.vreg.schema + schema = cnx.vreg.schema rtype = rdefdef.name rschema = schema.rschema(rtype) # this have to be done before permissions setting @@ -518,9 +516,9 @@ # need to add a column if the relation is inlined and if this is the # first occurence of "Subject relation Something" whatever Something if len(rschema.objects(rdefdef.subject)) == 1: - add_inline_relation_column(session, rdefdef.subject, rtype) + add_inline_relation_column(cnx, rdefdef.subject, rtype) eschema = schema[rdefdef.subject] - insert_rdef_on_subclasses(session, eschema, rschema, rdefdef, + insert_rdef_on_subclasses(cnx, eschema, rschema, rdefdef, {'composite': entity.composite}) else: if rschema.symmetric: @@ -533,13 +531,13 @@ # schema and if it has not been added during other event of the same # transaction if not (relation_already_defined or - rtype in session.transaction_data.get('createdtables', ())): + rtype in cnx.transaction_data.get('createdtables', ())): rschema = schema.rschema(rtype) # create the necessary table for sql in y2sql.rschema2sql(rschema).split(';'): if sql.strip(): - session.system_sql(sql) - session.transaction_data.setdefault('createdtables', []).append( + cnx.system_sql(sql) + cnx.transaction_data.setdefault('createdtables', []).append( rtype) # XXX revertprecommit_event @@ -550,12 +548,12 @@ rdef = None # make pylint happy def precommit_event(self): - session = self.session + cnx = self.cnx rdef = self.rdef rschema = rdef.rtype # make necessary changes to the system source database first rdeftype = rschema.final and 'CWAttribute' or 'CWRelation' - execute = session.execute + execute = cnx.execute rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,' 'R eid %%(x)s' % rdeftype, {'x': rschema.eid}) lastrel = rset[0][0] == 0 @@ -567,19 +565,19 @@ 'R eid %%(r)s, X from_entity E, E eid %%(e)s' % rdeftype, {'r': rschema.eid, 'e': rdef.subject.eid}) - if rset[0][0] == 0 and not session.deleted_in_transaction(rdef.subject.eid): - ptypes = session.transaction_data.setdefault('pendingrtypes', set()) + if rset[0][0] == 0 and not cnx.deleted_in_transaction(rdef.subject.eid): + ptypes = cnx.transaction_data.setdefault('pendingrtypes', set()) ptypes.add(rschema.type) - DropColumn(session, table=SQL_PREFIX + str(rdef.subject), + DropColumn(cnx, table=SQL_PREFIX + str(rdef.subject), column=SQL_PREFIX + str(rschema)) elif lastrel: - DropRelationTable(session, str(rschema)) + DropRelationTable(cnx, str(rschema)) # then update the in-memory schema if rdef.subject not in ETYPE_NAME_MAP and rdef.object not in ETYPE_NAME_MAP: rschema.del_relation_def(rdef.subject, rdef.object) # if this is the last relation definition of this type, drop associated # relation type - if lastrel and not session.deleted_in_transaction(rschema.eid): + if lastrel and not cnx.deleted_in_transaction(rschema.eid): execute('DELETE CWRType X WHERE X eid %(x)s', {'x': rschema.eid}) def revertprecommit_event(self): @@ -590,7 +588,7 @@ rdef = self.rdef rdef.name = str(rdef.rtype) if rdef.subject not in ETYPE_NAME_MAP and rdef.object not in ETYPE_NAME_MAP: - self.session.vreg.schema.add_relation_def(rdef) + self.cnx.vreg.schema.add_relation_def(rdef) @@ -601,23 +599,23 @@ indexed_changed = null_allowed_changed = False def precommit_event(self): - session = self.session + cnx = self.cnx rdef = self.rdef = self.rschema.rdefs[self.rdefkey] # update the in-memory schema first 
self.oldvalues = dict( (attr, getattr(rdef, attr)) for attr in self.values) rdef.update(self.values) # then make necessary changes to the system source database - syssource = session.cnxset.source('system') + syssource = cnx.repo.system_source if 'indexed' in self.values: - syssource.update_rdef_indexed(session, rdef) + syssource.update_rdef_indexed(cnx, rdef) self.indexed_changed = True if 'cardinality' in self.values and (rdef.rtype.final or rdef.rtype.inlined) \ and self.values['cardinality'][0] != self.oldvalues['cardinality'][0]: - syssource.update_rdef_null_allowed(self.session, rdef) + syssource.update_rdef_null_allowed(self.cnx, rdef) self.null_allowed_changed = True if 'fulltextindexed' in self.values: - UpdateFTIndexOp.get_instance(session).add_data(rdef.subject) + UpdateFTIndexOp.get_instance(cnx).add_data(rdef.subject) def revertprecommit_event(self): if self.rdef is None: @@ -625,17 +623,17 @@ # revert changes on in memory schema self.rdef.update(self.oldvalues) # revert changes on database - syssource = self.session.cnxset.source('system') + syssource = self.cnx.repo.system_source if self.indexed_changed: - syssource.update_rdef_indexed(self.session, self.rdef) + syssource.update_rdef_indexed(self.cnx, self.rdef) if self.null_allowed_changed: - syssource.update_rdef_null_allowed(self.session, self.rdef) + syssource.update_rdef_null_allowed(self.cnx, self.rdef) def _set_modifiable_constraints(rdef): # for proper in-place modification of in-memory schema: if rdef.constraints # is already a list, reuse it (we're updating multiple constraints of the - # same rdef in the same transactions) + # same rdef in the same transaction) if not isinstance(rdef.constraints, list): rdef.constraints = list(rdef.constraints) @@ -646,7 +644,7 @@ size_cstr_changed = unique_changed = False def precommit_event(self): - session = self.session + cnx = self.cnx rdef = self.rdef # in-place modification of in-memory schema first _set_modifiable_constraints(rdef) @@ -655,15 +653,18 @@ else: self.critical('constraint %s for rdef %s was missing or already removed', self.oldcstr, rdef) + if cnx.deleted_in_transaction(rdef.eid): + # don't try to alter a table that's going away (or is already gone) + return # then update database: alter the physical schema on size/unique # constraint changes - syssource = session.cnxset.source('system') + syssource = cnx.repo.system_source cstrtype = self.oldcstr.type() if cstrtype == 'SizeConstraint': - syssource.update_rdef_column(session, rdef) + syssource.update_rdef_column(cnx, rdef) self.size_cstr_changed = True elif cstrtype == 'UniqueConstraint': - syssource.update_rdef_unique(session, rdef) + syssource.update_rdef_unique(cnx, rdef) self.unique_changed = True def revertprecommit_event(self): @@ -673,11 +674,11 @@ if self.oldcstr is not None: self.rdef.constraints.append(self.oldcstr) # revert changes on database - syssource = self.session.cnxset.source('system') + syssource = self.cnx.repo.system_source if self.size_cstr_changed: - syssource.update_rdef_column(self.session, self.rdef) + syssource.update_rdef_column(self.cnx, self.rdef) if self.unique_changed: - syssource.update_rdef_unique(self.session, self.rdef) + syssource.update_rdef_unique(self.cnx, self.rdef) class CWConstraintAddOp(CWConstraintDelOp): @@ -685,14 +686,14 @@ entity = None # make pylint happy def precommit_event(self): - session = self.session + cnx = self.cnx rdefentity = self.entity.reverse_constrained_by[0] # when the relation is added in the same transaction, the constraint # object is created 
by the operation adding the attribute or relation, # so there is nothing to do here - if session.added_in_transaction(rdefentity.eid): + if cnx.added_in_transaction(rdefentity.eid): return - rdef = self.rdef = session.vreg.schema.schema_by_eid(rdefentity.eid) + rdef = self.rdef = cnx.vreg.schema.schema_by_eid(rdefentity.eid) cstrtype = self.entity.type oldcstr = self.oldcstr = rdef.constraint_by_type(cstrtype) newcstr = self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) @@ -704,13 +705,13 @@ rdef.constraints.append(newcstr) # then update database: alter the physical schema on size/unique # constraint changes - syssource = session.cnxset.source('system') + syssource = cnx.repo.system_source if cstrtype == 'SizeConstraint' and (oldcstr is None or oldcstr.max != newcstr.max): - syssource.update_rdef_column(session, rdef) + syssource.update_rdef_column(cnx, rdef) self.size_cstr_changed = True elif cstrtype == 'UniqueConstraint' and oldcstr is None: - syssource.update_rdef_unique(session, rdef) + syssource.update_rdef_unique(cnx, rdef) self.unique_changed = True @@ -718,19 +719,19 @@ entity = None # make pylint happy def precommit_event(self): - session = self.session + cnx = self.cnx prefix = SQL_PREFIX entity = self.entity table = '%s%s' % (prefix, entity.constraint_of[0].name) cols = ['%s%s' % (prefix, r.name) for r in entity.relations] - dbhelper = session.cnxset.source('system').dbhelper + dbhelper = cnx.repo.system_source.dbhelper sqls = dbhelper.sqls_create_multicol_unique_index(table, cols, entity.name) for sql in sqls: - session.system_sql(sql) + cnx.system_sql(sql) def postcommit_event(self): entity = self.entity - eschema = self.session.vreg.schema.schema_by_eid(entity.constraint_of[0].eid) + eschema = self.cnx.vreg.schema.schema_by_eid(entity.constraint_of[0].eid) attrs = [r.name for r in entity.relations] eschema._unique_together.append(attrs) @@ -740,17 +741,17 @@ cols = () # for pylint def precommit_event(self): - session = self.session + cnx = self.cnx prefix = SQL_PREFIX table = '%s%s' % (prefix, self.entity.type) - dbhelper = session.cnxset.source('system').dbhelper + dbhelper = cnx.repo.system_source.dbhelper cols = ['%s%s' % (prefix, c) for c in self.cols] sqls = dbhelper.sqls_drop_multicol_unique_index(table, cols, self.cstrname) for sql in sqls: - session.system_sql(sql) + cnx.system_sql(sql) def postcommit_event(self): - eschema = self.session.vreg.schema.schema_by_eid(self.entity.eid) + eschema = self.cnx.vreg.schema.schema_by_eid(self.entity.eid) cols = set(self.cols) unique_together = [ut for ut in eschema._unique_together if set(ut) != cols] @@ -765,7 +766,7 @@ def postcommit_event(self): # del_entity_type also removes entity's relations - self.session.vreg.schema.del_entity_type(self.etype) + self.cnx.vreg.schema.del_entity_type(self.etype) class MemSchemaCWRTypeAdd(MemSchemaOperation): @@ -773,10 +774,10 @@ rtypedef = None # make pylint happy def precommit_event(self): - self.session.vreg.schema.add_relation_type(self.rtypedef) + self.cnx.vreg.schema.add_relation_type(self.rtypedef) def revertprecommit_event(self): - self.session.vreg.schema.del_relation_type(self.rtypedef.name) + self.cnx.vreg.schema.del_relation_type(self.rtypedef.name) class MemSchemaCWRTypeDel(MemSchemaOperation): @@ -785,7 +786,7 @@ def postcommit_event(self): try: - self.session.vreg.schema.del_relation_type(self.rtype) + self.cnx.vreg.schema.del_relation_type(self.rtype) except KeyError: # s/o entity type have already been deleted pass @@ -799,14 +800,14 @@ def 
precommit_event(self): """the observed connections.cnxset has been commited""" try: - erschema = self.session.vreg.schema.schema_by_eid(self.eid) + erschema = self.cnx.vreg.schema.schema_by_eid(self.eid) except KeyError: # duh, schema not found, log error and skip operation self.warning('no schema for %s', self.eid) return perms = list(erschema.action_permissions(self.action)) if self.group_eid is not None: - perm = self.session.entity_from_eid(self.group_eid).name + perm = self.cnx.entity_from_eid(self.group_eid).name else: perm = erschema.rql_expression(self.expr) try: @@ -828,7 +829,7 @@ def precommit_event(self): """the observed connections set has been commited""" try: - erschema = self.session.vreg.schema.schema_by_eid(self.eid) + erschema = self.cnx.vreg.schema.schema_by_eid(self.eid) except KeyError: # duh, schema not found, log error and skip operation self.warning('no schema for %s', self.eid) @@ -840,7 +841,7 @@ return perms = list(erschema.action_permissions(self.action)) if self.group_eid is not None: - perm = self.session.entity_from_eid(self.group_eid).name + perm = self.cnx.entity_from_eid(self.group_eid).name else: perm = erschema.rql_expression(self.expr) try: @@ -857,8 +858,8 @@ etypeeid = parentetypeeid = None # make pylint happy def precommit_event(self): - eschema = self.session.vreg.schema.schema_by_eid(self.etypeeid) - parenteschema = self.session.vreg.schema.schema_by_eid(self.parentetypeeid) + eschema = self.cnx.vreg.schema.schema_by_eid(self.etypeeid) + parenteschema = self.cnx.vreg.schema.schema_by_eid(self.parentetypeeid) eschema._specialized_type = parenteschema.type parenteschema._specialized_by.append(eschema.type) @@ -870,8 +871,8 @@ def precommit_event(self): try: - eschema = self.session.vreg.schema.schema_by_eid(self.etypeeid) - parenteschema = self.session.vreg.schema.schema_by_eid(self.parentetypeeid) + eschema = self.cnx.vreg.schema.schema_by_eid(self.etypeeid) + parenteschema = self.cnx.vreg.schema.schema_by_eid(self.parentetypeeid) except KeyError: # etype removed, nothing to do return @@ -1030,14 +1031,14 @@ events = ('after_delete_relation',) def __call__(self): - session = self._cw + cnx = self._cw try: - rdef = session.vreg.schema.schema_by_eid(self.eidfrom) + rdef = cnx.vreg.schema.schema_by_eid(self.eidfrom) except KeyError: self.critical('cant get schema rdef associated to %s', self.eidfrom) return subjschema, rschema, objschema = rdef.as_triple() - pendingrdefs = session.transaction_data.setdefault('pendingrdefs', set()) + pendingrdefs = cnx.transaction_data.setdefault('pendingrdefs', set()) # first delete existing relation if necessary if rschema.final: rdeftype = 'CWAttribute' @@ -1045,11 +1046,11 @@ else: rdeftype = 'CWRelation' pendingrdefs.add((subjschema, rschema, objschema)) - if not (session.deleted_in_transaction(subjschema.eid) or - session.deleted_in_transaction(objschema.eid)): - session.execute('DELETE X %s Y WHERE X is %s, Y is %s' + if not (cnx.deleted_in_transaction(subjschema.eid) or + cnx.deleted_in_transaction(objschema.eid)): + cnx.execute('DELETE X %s Y WHERE X is %s, Y is %s' % (rschema, subjschema, objschema)) - RDefDelOp(session, rdef=rdef) + RDefDelOp(cnx, rdef=rdef) # CWAttribute / CWRelation hooks ############################################### @@ -1127,20 +1128,21 @@ self._cw.transaction_data.setdefault(self.eidfrom, []).append(self.eidto) -class BeforeDeleteConstrainedByHook(SyncSchemaHook): - __regid__ = 'syncdelconstrainedby' - __select__ = SyncSchemaHook.__select__ & hook.match_rtype('constrained_by') - events 
= ('before_delete_relation',) +class BeforeDeleteCWConstraintHook(SyncSchemaHook): + __regid__ = 'syncdelcwconstraint' + __select__ = SyncSchemaHook.__select__ & is_instance('CWConstraint') + events = ('before_delete_entity',) def __call__(self): - if self._cw.deleted_in_transaction(self.eidfrom): - return + entity = self.entity schema = self._cw.vreg.schema - rdef = schema.schema_by_eid(self.eidfrom) try: - cstr = rdef.constraint_by_eid(self.eidto) - except ValueError: - self._cw.critical('constraint no more accessible') + # KeyError, e.g. composite chain deletion + rdef = schema.schema_by_eid(entity.reverse_constrained_by[0].eid) + # IndexError + cstr = rdef.constraint_by_eid(entity.eid) + except (KeyError, IndexError): + self._cw.critical('constraint type no more accessible') else: CWConstraintDelOp(self._cw, rdef=rdef, oldcstr=cstr) @@ -1183,7 +1185,7 @@ def __call__(self): action = self.rtype.split('_', 1)[0] - if self._cw.describe(self.eidto)[0] == 'CWGroup': + if self._cw.entity_metas(self.eidto)['type'] == 'CWGroup': MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom, group_eid=self.eidto) else: # RQLExpression @@ -1204,7 +1206,7 @@ if self._cw.deleted_in_transaction(self.eidfrom): return action = self.rtype.split('_', 1)[0] - if self._cw.describe(self.eidto)[0] == 'CWGroup': + if self._cw.entity_metas(self.eidto)['type'] == 'CWGroup': MemSchemaPermissionDel(self._cw, action=action, eid=self.eidfrom, group_eid=self.eidto) else: # RQLExpression @@ -1222,26 +1224,26 @@ """ def postcommit_event(self): - session = self.session - source = session.repo.system_source - schema = session.repo.vreg.schema + cnx = self.cnx + source = cnx.repo.system_source + schema = cnx.repo.vreg.schema to_reindex = self.get_data() self.info('%i etypes need full text indexed reindexation', len(to_reindex)) for etype in to_reindex: - rset = session.execute('Any X WHERE X is %s' % etype) + rset = cnx.execute('Any X WHERE X is %s' % etype) self.info('Reindexing full text index for %i entity of type %s', len(rset), etype) still_fti = list(schema[etype].indexable_attributes()) for entity in rset.entities(): - source.fti_unindex_entities(session, [entity]) + source.fti_unindex_entities(cnx, [entity]) for container in entity.cw_adapt_to('IFTIndexable').fti_containers(): if still_fti or container is not entity: - source.fti_unindex_entities(session, [container]) - source.fti_index_entities(session, [container]) + source.fti_unindex_entities(cnx, [container]) + source.fti_index_entities(cnx, [container]) if to_reindex: # Transaction has already been committed - session.cnxset.commit() + cnx.cnxset.commit() diff -r 84738d495ffd -r 793377697c81 hooks/syncsession.py --- a/hooks/syncsession.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/syncsession.py Wed Sep 24 18:04:30 2014 +0200 @@ -42,15 +42,15 @@ """base class for group operation""" cnxuser = None # make pylint happy - def __init__(self, session, *args, **kwargs): + def __init__(self, cnx, *args, **kwargs): """override to get the group name before actual groups manipulation: we may temporarily loose right access during a commit event, so no query should be emitted while comitting """ rql = 'Any N WHERE G eid %(x)s, G name N' - result = session.execute(rql, {'x': kwargs['geid']}, build_descr=False) - hook.Operation.__init__(self, session, *args, **kwargs) + result = cnx.execute(rql, {'x': kwargs['geid']}, build_descr=False) + hook.Operation.__init__(self, cnx, *args, **kwargs) self.group = result[0][0] @@ -94,14 +94,14 @@ class 
_DelUserOp(hook.Operation): """close associated user's session when it is deleted""" - def __init__(self, session, cnxid): - self.cnxid = cnxid - hook.Operation.__init__(self, session) + def __init__(self, cnx, sessionid): + self.sessionid = sessionid + hook.Operation.__init__(self, cnx) def postcommit_event(self): """the observed connections set has been commited""" try: - self.session.repo.close(self.cnxid) + self.cnx.repo.close(self.sessionid) except BadConnectionId: pass # already closed @@ -148,7 +148,7 @@ """the observed connections set has been commited""" cwprop = self.cwprop if not cwprop.for_user: - self.session.vreg['propertyvalues'][cwprop.pkey] = cwprop.value + self.cnx.vreg['propertyvalues'][cwprop.pkey] = cwprop.value # if for_user is set, update is handled by a ChangeCWPropertyOp operation @@ -161,19 +161,19 @@ key, value = self.entity.pkey, self.entity.value if key.startswith('sources.'): return - session = self._cw + cnx = self._cw try: - value = session.vreg.typed_value(key, value) + value = cnx.vreg.typed_value(key, value) except UnknownProperty: msg = _('unknown property key %s') raise validation_error(self.entity, {('pkey', 'subject'): msg}, (key,)) except ValueError as ex: raise validation_error(self.entity, {('value', 'subject'): str(ex)}) - if not session.user.matching_groups('managers'): - session.add_relation(self.entity.eid, 'for_user', session.user.eid) + if not cnx.user.matching_groups('managers'): + cnx.add_relation(self.entity.eid, 'for_user', cnx.user.eid) else: - _AddCWPropertyOp(session, cwprop=self.entity) + _AddCWPropertyOp(cnx, cwprop=self.entity) class UpdateCWPropertyHook(AddCWPropertyHook): @@ -188,20 +188,20 @@ key, value = entity.pkey, entity.value if key.startswith('sources.'): return - session = self._cw + cnx = self._cw try: - value = session.vreg.typed_value(key, value) + value = cnx.vreg.typed_value(key, value) except UnknownProperty: return except ValueError as ex: raise validation_error(entity, {('value', 'subject'): str(ex)}) if entity.for_user: - for session_ in get_user_sessions(session.repo, entity.for_user[0].eid): - _ChangeCWPropertyOp(session, cwpropdict=session_.user.properties, + for session in get_user_sessions(cnx.repo, entity.for_user[0].eid): + _ChangeCWPropertyOp(cnx, cwpropdict=session.user.properties, key=key, value=value) else: # site wide properties - _ChangeCWPropertyOp(session, cwpropdict=session.vreg['propertyvalues'], + _ChangeCWPropertyOp(cnx, cwpropdict=cnx.vreg['propertyvalues'], key=key, value=value) @@ -211,13 +211,13 @@ def __call__(self): eid = self.entity.eid - session = self._cw - for eidfrom, rtype, eidto in session.transaction_data.get('pendingrelations', ()): + cnx = self._cw + for eidfrom, rtype, eidto in cnx.transaction_data.get('pendingrelations', ()): if rtype == 'for_user' and eidfrom == self.entity.eid: # if for_user was set, delete has already been handled break else: - _DelCWPropertyOp(session, cwpropdict=session.vreg['propertyvalues'], + _DelCWPropertyOp(cnx, cwpropdict=cnx.vreg['propertyvalues'], key=self.entity.pkey) @@ -227,17 +227,17 @@ events = ('after_add_relation',) def __call__(self): - session = self._cw + cnx = self._cw eidfrom = self.eidfrom - if not session.describe(eidfrom)[0] == 'CWProperty': + if not cnx.entity_metas(eidfrom)['type'] == 'CWProperty': return - key, value = session.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V', + key, value = cnx.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V', {'x': eidfrom})[0] - if session.vreg.property_info(key)['sitewide']: + if 
cnx.vreg.property_info(key)['sitewide']: msg = _("site-wide property can't be set for user") raise validation_error(eidfrom, {('for_user', 'subject'): msg}) - for session_ in get_user_sessions(session.repo, self.eidto): - _ChangeCWPropertyOp(session, cwpropdict=session_.user.properties, + for session in get_user_sessions(cnx.repo, self.eidto): + _ChangeCWPropertyOp(cnx, cwpropdict=session.user.properties, key=key, value=value) @@ -246,10 +246,10 @@ events = ('after_delete_relation',) def __call__(self): - session = self._cw - key = session.execute('Any K WHERE P eid %(x)s, P pkey K', + cnx = self._cw + key = cnx.execute('Any K WHERE P eid %(x)s, P pkey K', {'x': self.eidfrom})[0][0] - session.transaction_data.setdefault('pendingrelations', []).append( + cnx.transaction_data.setdefault('pendingrelations', []).append( (self.eidfrom, self.rtype, self.eidto)) - for session_ in get_user_sessions(session.repo, self.eidto): - _DelCWPropertyOp(session, cwpropdict=session_.user.properties, key=key) + for session in get_user_sessions(cnx.repo, self.eidto): + _DelCWPropertyOp(cnx, cwpropdict=session.user.properties, key=key) diff -r 84738d495ffd -r 793377697c81 hooks/syncsources.py --- a/hooks/syncsources.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/syncsources.py Wed Sep 24 18:04:30 2014 +0200 @@ -37,7 +37,7 @@ class SourceAddedOp(hook.Operation): entity = None # make pylint happy def postcommit_event(self): - self.session.repo.add_source(self.entity) + self.cnx.repo.add_source(self.entity) class SourceAddedHook(SourceHook): __regid__ = 'cw.sources.added' @@ -61,7 +61,7 @@ class SourceRemovedOp(hook.Operation): uri = None # make pylint happy def postcommit_event(self): - self.session.repo.remove_source(self.uri) + self.cnx.repo.remove_source(self.uri) class SourceRemovedHook(SourceHook): __regid__ = 'cw.sources.removed' @@ -79,7 +79,7 @@ def precommit_event(self): self.__processed = [] for source in self.get_data(): - if not self.session.deleted_in_transaction(source.eid): + if not self.cnx.deleted_in_transaction(source.eid): conf = source.repo_source.check_config(source) self.__processed.append( (source, conf) ) @@ -92,16 +92,13 @@ oldname = newname = None # make pylint happy def precommit_event(self): - source = self.session.repo.sources_by_uri[self.oldname] - if source.copy_based_source: - sql = 'UPDATE entities SET asource=%(newname)s WHERE asource=%(oldname)s' - else: - sql = 'UPDATE entities SET source=%(newname)s, asource=%(newname)s WHERE source=%(oldname)s' - self.session.system_sql(sql, {'oldname': self.oldname, + source = self.cnx.repo.sources_by_uri[self.oldname] + sql = 'UPDATE entities SET asource=%(newname)s WHERE asource=%(oldname)s' + self.cnx.system_sql(sql, {'oldname': self.oldname, 'newname': self.newname}) def postcommit_event(self): - repo = self.session.repo + repo = self.cnx.repo # XXX race condition source = repo.sources_by_uri.pop(self.oldname) source.uri = self.newname @@ -109,11 +106,6 @@ repo.sources_by_uri[self.newname] = source repo._type_source_cache.clear() clear_cache(repo, 'source_defs') - if not source.copy_based_source: - repo._extid_cache.clear() - repo._clear_planning_caches() - for cnxset in repo.cnxsets: - cnxset.source_cnxs[self.oldname] = cnxset.source_cnxs.pop(self.oldname) class SourceUpdatedHook(SourceHook): @@ -172,7 +164,7 @@ class SourceMappingChangedOp(hook.DataOperationMixIn, hook.Operation): def check_or_update(self, checkonly): - session = self.session + cnx = self.cnx # take care, can't call get_data() twice try: data = self.__data @@ -181,10 
+173,10 @@ for schemacfg, source in data: if source is None: source = schemacfg.cwsource.repo_source - if session.added_in_transaction(schemacfg.eid): - if not session.deleted_in_transaction(schemacfg.eid): + if cnx.added_in_transaction(schemacfg.eid): + if not cnx.deleted_in_transaction(schemacfg.eid): source.add_schema_config(schemacfg, checkonly=checkonly) - elif session.deleted_in_transaction(schemacfg.eid): + elif cnx.deleted_in_transaction(schemacfg.eid): source.del_schema_config(schemacfg, checkonly=checkonly) else: source.update_schema_config(schemacfg, checkonly=checkonly) diff -r 84738d495ffd -r 793377697c81 hooks/test/data/schema.py --- a/hooks/test/data/schema.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/test/data/schema.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -16,10 +16,23 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -from yams.buildobjs import RelationDefinition +from yams.buildobjs import RelationDefinition, EntityType, String class friend(RelationDefinition): subject = ('CWUser', 'CWGroup') object = ('CWUser', 'CWGroup') symmetric = True +class Folder(EntityType): + name = String() + +class parent(RelationDefinition): + subject = 'Folder' + object = 'Folder' + composite = 'object' + cardinality = '?*' + +class children(RelationDefinition): + subject = 'Folder' + object = 'Folder' + composite = 'subject' diff -r 84738d495ffd -r 793377697c81 hooks/test/unittest_bookmarks.py --- a/hooks/test/unittest_bookmarks.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/test/unittest_bookmarks.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
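The Folder, parent and children definitions added to the test schema above give the hook tests a composite hierarchy to exercise: composite='object' on parent makes the parent folder own its subfolders, so deleting the parent is expected to cascade to its components. A sketch of such a check in the admin_access style used by the rewritten tests below (this exact test is not part of the patch; class and method names are made up):

    from cubicweb.devtools.testlib import CubicWebTC

    class FolderCompositeTC(CubicWebTC):
        def test_delete_parent_cascades(self):
            with self.admin_access.repo_cnx() as cnx:
                parent = cnx.create_entity('Folder', name=u'parent')
                cnx.create_entity('Folder', name=u'child', parent=parent)
                cnx.commit()
                parent.cw_delete()  # composite children go away with their parent
                cnx.commit()
                self.assertFalse(cnx.execute(
                    'Any X WHERE X is Folder, X name "child"'))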
@@ -22,16 +22,17 @@ def test_auto_delete_bookmarks(self): - beid = self.execute('INSERT Bookmark X: X title "hop", X path "view", X bookmarked_by U ' - 'WHERE U login "admin"')[0][0] - self.execute('SET X bookmarked_by U WHERE U login "anon"') - self.commit() - self.execute('DELETE X bookmarked_by U WHERE U login "admin"') - self.commit() - self.assertTrue(self.execute('Any X WHERE X eid %(x)s', {'x': beid})) - self.execute('DELETE X bookmarked_by U WHERE U login "anon"') - self.commit() - self.assertFalse(self.execute('Any X WHERE X eid %(x)s', {'x': beid})) + with self.admin_access.repo_cnx() as cnx: + beid = cnx.execute('INSERT Bookmark X: X title "hop", X path "view", X bookmarked_by U ' + 'WHERE U login "admin"')[0][0] + cnx.execute('SET X bookmarked_by U WHERE U login "anon"') + cnx.commit() + cnx.execute('DELETE X bookmarked_by U WHERE U login "admin"') + cnx.commit() + self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': beid})) + cnx.execute('DELETE X bookmarked_by U WHERE U login "anon"') + cnx.commit() + self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': beid})) if __name__ == '__main__': unittest_main() diff -r 84738d495ffd -r 793377697c81 hooks/test/unittest_hooks.py --- a/hooks/test/unittest_hooks.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/test/unittest_hooks.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -30,177 +30,189 @@ class CoreHooksTC(CubicWebTC): def test_inlined(self): - self.assertEqual(self.repo.schema['sender'].inlined, True) - self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') - eeid = self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart')[0][0] - self.execute('SET X sender Y WHERE X is Email, Y is EmailAddress') - rset = self.execute('Any S WHERE X sender S, X eid %s' % eeid) - self.assertEqual(len(rset), 1) + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(self.repo.schema['sender'].inlined, True) + cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') + cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' + 'X content "this is a test"') + eeid = cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", ' + 'X sender Y, X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart')[0][0] + cnx.execute('SET X sender Y WHERE X is Email, Y is EmailAddress') + rset = cnx.execute('Any S WHERE X sender S, X eid %s' % eeid) + self.assertEqual(len(rset), 1) def test_symmetric(self): - req = self.request() - u1 = self.create_user(req, u'1') - u2 = self.create_user(req, u'2') - u3 = self.create_user(req, u'3') - ga = req.create_entity('CWGroup', name=u'A') - gb = req.create_entity('CWGroup', name=u'B') - u1.cw_set(friend=u2) - u2.cw_set(friend=u3) - ga.cw_set(friend=gb) - ga.cw_set(friend=u1) - self.commit() - req = self.request() - for l1, l2 in ((u'1', u'2'), - (u'2', u'3')): - self.assertTrue(req.execute('Any U1,U2 WHERE U1 friend U2, U1 login %(l1)s, U2 login %(l2)s', - {'l1': l1, 'l2': l2})) - self.assertTrue(req.execute('Any U1,U2 WHERE U2 friend U1, 
U1 login %(l1)s, U2 login %(l2)s', - {'l1': l1, 'l2': l2})) - self.assertTrue(req.execute('Any GA,GB WHERE GA friend GB, GA name "A", GB name "B"')) - self.assertTrue(req.execute('Any GA,GB WHERE GB friend GA, GA name "A", GB name "B"')) - self.assertTrue(req.execute('Any GA,U1 WHERE GA friend U1, GA name "A", U1 login "1"')) - self.assertTrue(req.execute('Any GA,U1 WHERE U1 friend GA, GA name "A", U1 login "1"')) - self.assertFalse(req.execute('Any GA,U WHERE GA friend U, GA name "A", U login "2"')) - for l1, l2 in ((u'1', u'3'), - (u'3', u'1')): - self.assertFalse(req.execute('Any U1,U2 WHERE U1 friend U2, U1 login %(l1)s, U2 login %(l2)s', - {'l1': l1, 'l2': l2})) - self.assertFalse(req.execute('Any U1,U2 WHERE U2 friend U1, U1 login %(l1)s, U2 login %(l2)s', - {'l1': l1, 'l2': l2})) + with self.admin_access.repo_cnx() as cnx: + u1 = self.create_user(cnx, u'1') + u2 = self.create_user(cnx, u'2') + u3 = self.create_user(cnx, u'3') + ga = cnx.create_entity('CWGroup', name=u'A') + gb = cnx.create_entity('CWGroup', name=u'B') + u1.cw_set(friend=u2) + u2.cw_set(friend=u3) + ga.cw_set(friend=gb) + ga.cw_set(friend=u1) + cnx.commit() + for l1, l2 in ((u'1', u'2'), + (u'2', u'3')): + self.assertTrue(cnx.execute('Any U1,U2 WHERE U1 friend U2, U1 login %(l1)s, U2 login %(l2)s', + {'l1': l1, 'l2': l2})) + self.assertTrue(cnx.execute('Any U1,U2 WHERE U2 friend U1, U1 login %(l1)s, U2 login %(l2)s', + {'l1': l1, 'l2': l2})) + self.assertTrue(cnx.execute('Any GA,GB WHERE GA friend GB, GA name "A", GB name "B"')) + self.assertTrue(cnx.execute('Any GA,GB WHERE GB friend GA, GA name "A", GB name "B"')) + self.assertTrue(cnx.execute('Any GA,U1 WHERE GA friend U1, GA name "A", U1 login "1"')) + self.assertTrue(cnx.execute('Any GA,U1 WHERE U1 friend GA, GA name "A", U1 login "1"')) + self.assertFalse(cnx.execute('Any GA,U WHERE GA friend U, GA name "A", U login "2"')) + for l1, l2 in ((u'1', u'3'), + (u'3', u'1')): + self.assertFalse(cnx.execute('Any U1,U2 WHERE U1 friend U2, U1 login %(l1)s, U2 login %(l2)s', + {'l1': l1, 'l2': l2})) + self.assertFalse(cnx.execute('Any U1,U2 WHERE U2 friend U1, U1 login %(l1)s, U2 login %(l2)s', + {'l1': l1, 'l2': l2})) def test_html_tidy_hook(self): - req = self.request() - entity = req.create_entity('Workflow', name=u'wf1', - description_format=u'text/html', - description=u'yo') - self.assertEqual(entity.description, u'yo') - entity = req.create_entity('Workflow', name=u'wf2', - description_format=u'text/html', - description=u'yo') - self.assertEqual(entity.description, u'yo') - entity = req.create_entity('Workflow', name=u'wf3', - description_format=u'text/html', - description=u'yo') - self.assertEqual(entity.description, u'yo') - entity = req.create_entity('Workflow', name=u'wf4', - description_format=u'text/html', - description=u'R&D') - self.assertEqual(entity.description, u'R&D') - entity = req.create_entity('Workflow', name=u'wf5', - description_format=u'text/html', - description=u"
<div>c'est l'été") - self.assertEqual(entity.description, u"<div>c'est l'été</div>
") + with self.admin_access.client_cnx() as cnx: + entity = cnx.create_entity('Workflow', name=u'wf1', + description_format=u'text/html', + description=u'yo') + self.assertEqual(entity.description, u'yo') + entity = cnx.create_entity('Workflow', name=u'wf2', + description_format=u'text/html', + description=u'yo') + self.assertEqual(entity.description, u'yo') + entity = cnx.create_entity('Workflow', name=u'wf3', + description_format=u'text/html', + description=u'yo') + self.assertEqual(entity.description, u'yo') + entity = cnx.create_entity('Workflow', name=u'wf4', + description_format=u'text/html', + description=u'R&D') + self.assertEqual(entity.description, u'R&D') + entity = cnx.create_entity('Workflow', name=u'wf5', + description_format=u'text/html', + description=u"
<div>c'est l'été") + self.assertEqual(entity.description, u"<div>c'est l'été</div>
") def test_nonregr_html_tidy_hook_no_update(self): - entity = self.request().create_entity('Workflow', name=u'wf1', - description_format=u'text/html', - description=u'yo') - entity.cw_set(name=u'wf2') - self.assertEqual(entity.description, u'yo') - entity.cw_set(description=u'R&D<p>yo') - self.assertEqual(entity.description, u'R&D<p>yo</p>
') + with self.admin_access.client_cnx() as cnx: + entity = cnx.create_entity('Workflow', name=u'wf1', + description_format=u'text/html', + description=u'yo') + entity.cw_set(name=u'wf2') + self.assertEqual(entity.description, u'yo') + entity.cw_set(description=u'R&D<p>yo') + self.assertEqual(entity.description, u'R&D<p>yo</p>
') def test_metadata_cwuri(self): - entity = self.request().create_entity('Workflow', name=u'wf1') - self.assertEqual(entity.cwuri, self.repo.config['base-url'] + str(entity.eid)) + with self.admin_access.repo_cnx() as cnx: + entity = cnx.create_entity('Workflow', name=u'wf1') + self.assertEqual(entity.cwuri, self.repo.config['base-url'] + str(entity.eid)) def test_metadata_creation_modification_date(self): - _now = datetime.now() - entity = self.request().create_entity('Workflow', name=u'wf1') - self.assertEqual((entity.creation_date - _now).seconds, 0) - self.assertEqual((entity.modification_date - _now).seconds, 0) + with self.admin_access.repo_cnx() as cnx: + _now = datetime.now() + entity = cnx.create_entity('Workflow', name=u'wf1') + self.assertEqual((entity.creation_date - _now).seconds, 0) + self.assertEqual((entity.modification_date - _now).seconds, 0) def test_metadata_created_by(self): - entity = self.request().create_entity('Bookmark', title=u'wf1', path=u'/view') - self.commit() # fire operations - self.assertEqual(len(entity.created_by), 1) # make sure we have only one creator - self.assertEqual(entity.created_by[0].eid, self.session.user.eid) + with self.admin_access.repo_cnx() as cnx: + entity = cnx.create_entity('Bookmark', title=u'wf1', path=u'/view') + cnx.commit() # fire operations + self.assertEqual(len(entity.created_by), 1) # make sure we have only one creator + self.assertEqual(entity.created_by[0].eid, cnx.user.eid) def test_metadata_owned_by(self): - entity = self.request().create_entity('Bookmark', title=u'wf1', path=u'/view') - self.commit() # fire operations - self.assertEqual(len(entity.owned_by), 1) # make sure we have only one owner - self.assertEqual(entity.owned_by[0].eid, self.session.user.eid) + with self.admin_access.repo_cnx() as cnx: + entity = cnx.create_entity('Bookmark', title=u'wf1', path=u'/view') + cnx.commit() # fire operations + self.assertEqual(len(entity.owned_by), 1) # make sure we have only one owner + self.assertEqual(entity.owned_by[0].eid, cnx.user.eid) def test_user_login_stripped(self): - req = self.request() - u = self.create_user(req, ' joe ') - tname = self.execute('Any L WHERE E login L, E eid %(e)s', - {'e': u.eid})[0][0] - self.assertEqual(tname, 'joe') - self.execute('SET X login " jijoe " WHERE X eid %(x)s', {'x': u.eid}) - tname = self.execute('Any L WHERE E login L, E eid %(e)s', - {'e': u.eid})[0][0] - self.assertEqual(tname, 'jijoe') + with self.admin_access.repo_cnx() as cnx: + u = self.create_user(cnx, ' joe ') + tname = cnx.execute('Any L WHERE E login L, E eid %(e)s', + {'e': u.eid})[0][0] + self.assertEqual(tname, 'joe') + cnx.execute('SET X login " jijoe " WHERE X eid %(x)s', {'x': u.eid}) + tname = cnx.execute('Any L WHERE E login L, E eid %(e)s', + {'e': u.eid})[0][0] + self.assertEqual(tname, 'jijoe') class UserGroupHooksTC(CubicWebTC): def test_user_synchronization(self): - req = self.request() - self.create_user(req, 'toto', password='hop', commit=False) - self.assertRaises(AuthenticationError, - self.repo.connect, u'toto', password='hop') - self.commit() - cnxid = self.repo.connect(u'toto', password='hop') - self.assertNotEqual(cnxid, self.session.id) - self.execute('DELETE CWUser X WHERE X login "toto"') - self.repo.execute(cnxid, 'State X') - self.commit() - self.assertRaises(BadConnectionId, - self.repo.execute, cnxid, 'State X') + with self.admin_access.repo_cnx() as cnx: + self.create_user(cnx, 'toto', password='hop', commit=False) + self.assertRaises(AuthenticationError, + self.repo.connect, u'toto', 
password='hop') + cnx.commit() + cnxid = self.repo.connect(u'toto', password='hop') + self.assertNotEqual(cnxid, cnx.sessionid) + cnx.execute('DELETE CWUser X WHERE X login "toto"') + self.repo.execute(cnxid, 'State X') + cnx.commit() + self.assertRaises(BadConnectionId, + self.repo.execute, cnxid, 'State X') def test_user_group_synchronization(self): - user = self.session.user - self.assertEqual(user.groups, set(('managers',))) - self.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid) - self.assertEqual(user.groups, set(('managers',))) - self.commit() - self.assertEqual(user.groups, set(('managers', 'guests'))) - self.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid) - self.assertEqual(user.groups, set(('managers', 'guests'))) - self.commit() - self.assertEqual(user.groups, set(('managers',))) + with self.admin_access.repo_cnx() as cnx: + user = cnx.user + self.assertEqual(user.groups, set(('managers',))) + cnx.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid) + self.assertEqual(user.groups, set(('managers',))) + cnx.commit() + self.assertEqual(user.groups, set(('managers', 'guests'))) + cnx.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid) + self.assertEqual(user.groups, set(('managers', 'guests'))) + cnx.commit() + self.assertEqual(user.groups, set(('managers',))) def test_user_composite_owner(self): - req = self.request() - ueid = self.create_user(req, 'toto').eid - # composite of euser should be owned by the euser regardless of who created it - self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", U use_email X ' - 'WHERE U login "toto"') - self.commit() - self.assertEqual(self.execute('Any A WHERE X owned_by U, U use_email X,' - 'U login "toto", X address A')[0][0], - 'toto@logilab.fr') + with self.admin_access.repo_cnx() as cnx: + self.create_user(cnx, 'toto').eid + # composite of euser should be owned by the euser regardless of who created it + cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", U use_email X ' + 'WHERE U login "toto"') + cnx.commit() + self.assertEqual(cnx.execute('Any A WHERE X owned_by U, U use_email X,' + 'U login "toto", X address A')[0][0], + 'toto@logilab.fr') def test_no_created_by_on_deleted_entity(self): - eid = self.execute('INSERT EmailAddress X: X address "toto@logilab.fr"')[0][0] - self.execute('DELETE EmailAddress X WHERE X eid %s' % eid) - self.commit() - self.assertFalse(self.execute('Any X WHERE X created_by Y, X eid >= %(x)s', {'x': eid})) + with self.admin_access.repo_cnx() as cnx: + eid = cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr"')[0][0] + cnx.execute('DELETE EmailAddress X WHERE X eid %s' % eid) + cnx.commit() + self.assertFalse(cnx.execute('Any X WHERE X created_by Y, X eid >= %(x)s', {'x': eid})) class SchemaHooksTC(CubicWebTC): def test_duplicate_etype_error(self): - # check we can't add a CWEType or CWRType entity if it already exists one - # with the same name - self.assertRaises(ValidationError, - self.execute, 'INSERT CWEType X: X name "CWUser"') - self.assertRaises(ValidationError, - self.execute, 'INSERT CWRType X: X name "in_group"') + with self.admin_access.repo_cnx() as cnx: + # check we can't add a CWEType or CWRType entity if it already exists one + # with the same name + self.assertRaises(ValidationError, + cnx.execute, 'INSERT CWEType X: X name "CWUser"') + cnx.rollback() + self.assertRaises(ValidationError, + cnx.execute, 'INSERT CWRType X: X name "in_group"') def 
test_validation_unique_constraint(self): - with self.assertRaises(ValidationError) as cm: - self.execute('INSERT CWUser X: X login "admin"') - ex = cm.exception - ex.translate(unicode) - self.assertIsInstance(ex.entity, int) - self.assertEqual(ex.errors, {'login-subject': 'the value "admin" is already used, use another one'}) + with self.admin_access.repo_cnx() as cnx: + with self.assertRaises(ValidationError) as cm: + cnx.execute('INSERT CWUser X: X login "admin"') + ex = cm.exception + ex.translate(unicode) + self.assertIsInstance(ex.entity, int) + self.assertEqual(ex.errors, {'login-subject': 'the value "admin" is already used, use another one'}) if __name__ == '__main__': diff -r 84738d495ffd -r 793377697c81 hooks/test/unittest_integrity.py --- a/hooks/test/unittest_integrity.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/test/unittest_integrity.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -24,90 +24,138 @@ class CoreHooksTC(CubicWebTC): def test_delete_internal_entities(self): - self.assertRaises(ValidationError, self.execute, - 'DELETE CWEType X WHERE X name "CWEType"') - self.assertRaises(ValidationError, self.execute, - 'DELETE CWRType X WHERE X name "relation_type"') - self.assertRaises(ValidationError, self.execute, - 'DELETE CWGroup X WHERE X name "owners"') + with self.admin_access.repo_cnx() as cnx: + self.assertRaises(ValidationError, cnx.execute, + 'DELETE CWEType X WHERE X name "CWEType"') + cnx.rollback() + self.assertRaises(ValidationError, cnx.execute, + 'DELETE CWRType X WHERE X name "relation_type"') + cnx.rollback() + self.assertRaises(ValidationError, cnx.execute, + 'DELETE CWGroup X WHERE X name "owners"') def test_delete_required_relations_subject(self): - self.execute('INSERT CWUser X: X login "toto", X upassword "hop", X in_group Y ' - 'WHERE Y name "users"') - self.commit() - self.execute('DELETE X in_group Y WHERE X login "toto", Y name "users"') - self.assertRaises(ValidationError, self.commit) - self.execute('DELETE X in_group Y WHERE X login "toto"') - self.execute('SET X in_group Y WHERE X login "toto", Y name "guests"') - self.commit() + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT CWUser X: X login "toto", X upassword "hop", X in_group Y ' + 'WHERE Y name "users"') + cnx.commit() + cnx.execute('DELETE X in_group Y WHERE X login "toto", Y name "users"') + self.assertRaises(ValidationError, cnx.commit) + cnx.rollback() + cnx.execute('DELETE X in_group Y WHERE X login "toto"') + cnx.execute('SET X in_group Y WHERE X login "toto", Y name "guests"') + cnx.commit() def test_static_vocabulary_check(self): - self.assertRaises(ValidationError, - self.execute, - 'SET X composite "whatever" WHERE X from_entity FE, FE name "CWUser", X relation_type RT, RT name "in_group"') + with self.admin_access.repo_cnx() as cnx: + self.assertRaises(ValidationError, + cnx.execute, + 'SET X composite "whatever" WHERE X from_entity FE, FE name "CWUser", ' + 'X relation_type RT, RT name "in_group"') def test_missing_required_relations_subject_inline(self): - # missing in_group relation - self.execute('INSERT CWUser X: X login "toto", X upassword "hop"') - self.assertRaises(ValidationError, - self.commit) + with self.admin_access.repo_cnx() as cnx: + # missing in_group relation + 
cnx.execute('INSERT CWUser X: X login "toto", X upassword "hop"') + self.assertRaises(ValidationError, cnx.commit) def test_composite_1(self): - self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') - self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart') - self.assertTrue(self.execute('Email X WHERE X sender Y')) - self.commit() - self.execute('DELETE Email X') - rset = self.execute('Any X WHERE X is EmailPart') - self.assertEqual(len(rset), 1) - self.commit() - rset = self.execute('Any X WHERE X is EmailPart') - self.assertEqual(len(rset), 0) + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') + cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' + 'X content "this is a test"') + cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, ' + 'X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart') + self.assertTrue(cnx.execute('Email X WHERE X sender Y')) + cnx.commit() + cnx.execute('DELETE Email X') + rset = cnx.execute('Any X WHERE X is EmailPart') + self.assertEqual(len(rset), 0) + cnx.commit() + rset = cnx.execute('Any X WHERE X is EmailPart') + self.assertEqual(len(rset), 0) def test_composite_2(self): - self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') - self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart') - self.commit() - self.execute('DELETE Email X') - self.execute('DELETE EmailPart X') - self.commit() - rset = self.execute('Any X WHERE X is EmailPart') - self.assertEqual(len(rset), 0) + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') + cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' + 'X content "this is a test"') + cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, ' + 'X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart') + cnx.commit() + cnx.execute('DELETE Email X') + cnx.execute('DELETE EmailPart X') + cnx.commit() + rset = cnx.execute('Any X WHERE X is EmailPart') + self.assertEqual(len(rset), 0) def test_composite_redirection(self): - self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') - self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') - self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' - 'WHERE Y is EmailAddress, P is EmailPart') - self.execute('INSERT Email X: X messageid "<2345>", X subject "test2", X sender Y, X recipients Y ' - 'WHERE Y is EmailAddress') - self.commit() - self.execute('DELETE X parts Y WHERE X messageid "<1234>"') - self.execute('SET X parts Y WHERE X messageid "<2345>"') - self.commit() - rset = self.execute('Any X WHERE X is EmailPart') - self.assertEqual(len(rset), 1) - self.assertEqual(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>') + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT EmailAddress X: X 
address "toto@logilab.fr", X alias "hop"') + cnx.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, ' + 'X content "this is a test"') + cnx.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, ' + 'X recipients Y, X parts P ' + 'WHERE Y is EmailAddress, P is EmailPart') + cnx.execute('INSERT Email X: X messageid "<2345>", X subject "test2", X sender Y, ' + 'X recipients Y ' + 'WHERE Y is EmailAddress') + cnx.commit() + cnx.execute('DELETE X parts Y WHERE X messageid "<1234>"') + cnx.execute('SET X parts Y WHERE X messageid "<2345>"') + cnx.commit() + rset = cnx.execute('Any X WHERE X is EmailPart') + self.assertEqual(len(rset), 1) + self.assertEqual(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>') + + def test_composite_object_relation_deletion(self): + with self.admin_access.repo_cnx() as cnx: + root = cnx.create_entity('Folder', name=u'root') + a = cnx.create_entity('Folder', name=u'a', parent=root) + cnx.create_entity('Folder', name=u'b', parent=a) + cnx.create_entity('Folder', name=u'c', parent=root) + cnx.commit() + cnx.execute('DELETE Folder F WHERE F name "a"') + cnx.execute('DELETE F parent R WHERE R name "root"') + cnx.commit() + self.assertEqual([['root'], ['c']], + cnx.execute('Any NF WHERE F is Folder, F name NF').rows) + self.assertEqual([], cnx.execute('Any NF,NP WHERE F parent P, F name NF, P name NP').rows) + + def test_composite_subject_relation_deletion(self): + with self.admin_access.repo_cnx() as cnx: + root = cnx.create_entity('Folder', name=u'root') + a = cnx.create_entity('Folder', name=u'a') + b = cnx.create_entity('Folder', name=u'b') + c = cnx.create_entity('Folder', name=u'c') + root.cw_set(children=(a, c)) + a.cw_set(children=b) + cnx.commit() + cnx.execute('DELETE Folder F WHERE F name "a"') + cnx.execute('DELETE R children F WHERE R name "root"') + cnx.commit() + self.assertEqual([['root'], ['c']], + cnx.execute('Any NF WHERE F is Folder, F name NF').rows) + self.assertEqual([], cnx.execute('Any NF,NP WHERE F parent P, F name NF, P name NP').rows) def test_unsatisfied_constraints(self): - releid = self.execute('SET U in_group G WHERE G name "owners", U login "admin"')[0][0] - with self.assertRaises(ValidationError) as cm: - self.commit() + with self.admin_access.repo_cnx() as cnx: + cnx.execute('SET U in_group G WHERE G name "owners", U login "admin"')[0][0] + with self.assertRaises(ValidationError) as cm: + cnx.commit() self.assertEqual(cm.exception.errors, - {'in_group-object': u'RQLConstraint NOT O name "owners" failed'}) + {'in_group-object': u'RQLConstraint NOT O name "owners" failed'}) def test_unique_constraint(self): - req = self.request() - entity = req.create_entity('CWGroup', name=u'trout') - self.commit() - self.assertRaises(ValidationError, req.create_entity, 'CWGroup', name=u'trout') - self.rollback() - req.execute('SET X name "trout" WHERE X eid %(x)s', {'x': entity.eid}) - self.commit() + with self.admin_access.repo_cnx() as cnx: + entity = cnx.create_entity('CWGroup', name=u'trout') + cnx.commit() + self.assertRaises(ValidationError, cnx.create_entity, 'CWGroup', name=u'trout') + cnx.rollback() + cnx.execute('SET X name "trout" WHERE X eid %(x)s', {'x': entity.eid}) + cnx.commit() if __name__ == '__main__': from logilab.common.testlib import unittest_main diff -r 84738d495ffd -r 793377697c81 hooks/test/unittest_syncschema.py --- a/hooks/test/unittest_syncschema.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/test/unittest_syncschema.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# 
copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -17,344 +17,362 @@ # with CubicWeb. If not, see . """cubicweb.server.hooks.syncschema unit and functional tests""" -from logilab.common.testlib import TestCase, unittest_main +from logilab.common.testlib import unittest_main from cubicweb import ValidationError, Binary from cubicweb.schema import META_RTYPES from cubicweb.devtools.testlib import CubicWebTC from cubicweb.server.sqlutils import SQL_PREFIX -from cubicweb.devtools.repotest import schema_eids_idx, restore_schema_eids_idx +from cubicweb.devtools.repotest import schema_eids_idx def tearDownModule(*args): del SchemaModificationHooksTC.schema_eids class SchemaModificationHooksTC(CubicWebTC): - reset_schema = True def setUp(self): super(SchemaModificationHooksTC, self).setUp() self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False) self.__class__.schema_eids = schema_eids_idx(self.repo.schema) - def index_exists(self, etype, attr, unique=False): - self.session.set_cnxset() - dbhelper = self.session.cnxset.source('system').dbhelper - sqlcursor = self.session.cnxset['system'] - return dbhelper.index_exists(sqlcursor, SQL_PREFIX + etype, SQL_PREFIX + attr, unique=unique) + def index_exists(self, cnx, etype, attr, unique=False): + dbhelper = self.repo.system_source.dbhelper + with cnx.ensure_cnx_set: + sqlcursor = cnx.cnxset.cu + return dbhelper.index_exists(sqlcursor, + SQL_PREFIX + etype, + SQL_PREFIX + attr, + unique=unique) - def _set_perms(self, eid): - self.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', - {'x': eid}) - self.execute('SET X add_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"', - {'x': eid}) - self.execute('SET X delete_permission G WHERE X eid %(x)s, G is CWGroup, G name "owners"', - {'x': eid}) + def _set_perms(self, cnx, eid): + cnx.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', + {'x': eid}) + cnx.execute('SET X add_permission G WHERE X eid %(x)s, G is CWGroup, ' + 'G name "managers"', {'x': eid}) + cnx.execute('SET X delete_permission G WHERE X eid %(x)s, G is CWGroup, ' + 'G name "owners"', {'x': eid}) - def _set_attr_perms(self, eid): - self.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', - {'x': eid}) - self.execute('SET X update_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"', - {'x': eid}) + def _set_attr_perms(self, cnx, eid): + cnx.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', + {'x': eid}) + cnx.execute('SET X update_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"', + {'x': eid}) def test_base(self): - schema = self.repo.schema - self.session.set_cnxset() - dbhelper = self.session.cnxset.source('system').dbhelper - sqlcursor = self.session.cnxset['system'] - self.assertFalse(schema.has_entity('Societe2')) - self.assertFalse(schema.has_entity('concerne2')) - # schema should be update on insertion (after commit) - eeid = self.execute('INSERT CWEType X: X name "Societe2", X description "", X final FALSE')[0][0] - self._set_perms(eeid) - self.execute('INSERT CWRType X: X name "concerne2", X description "", X final FALSE, X symmetric FALSE') - self.assertFalse(schema.has_entity('Societe2')) - self.assertFalse(schema.has_entity('concerne2')) - # have to commit before adding definition relations - self.commit() - 
self.assertTrue(schema.has_entity('Societe2')) - self.assertTrue(schema.has_relation('concerne2')) - attreid = self.execute('INSERT CWAttribute X: X cardinality "11", X defaultval %(default)s, ' - ' X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F ' - 'WHERE RT name "name", E name "Societe2", F name "String"', - {'default': Binary.zpickle('noname')})[0][0] - self._set_attr_perms(attreid) - concerne2_rdef_eid = self.execute( - 'INSERT CWRelation X: X cardinality "**", X relation_type RT, X from_entity E, X to_entity E ' - 'WHERE RT name "concerne2", E name "Societe2"')[0][0] - self._set_perms(concerne2_rdef_eid) - self.assertFalse('name' in schema['Societe2'].subject_relations()) - self.assertFalse('concerne2' in schema['Societe2'].subject_relations()) - self.assertFalse(self.index_exists('Societe2', 'name')) - self.commit() - self.assertTrue('name' in schema['Societe2'].subject_relations()) - self.assertTrue('concerne2' in schema['Societe2'].subject_relations()) - self.assertTrue(self.index_exists('Societe2', 'name')) - # now we should be able to insert and query Societe2 - s2eid = self.execute('INSERT Societe2 X: X name "logilab"')[0][0] - self.execute('Societe2 X WHERE X name "logilab"') - self.execute('SET X concerne2 X WHERE X name "logilab"') - rset = self.execute('Any X WHERE X concerne2 Y') - self.assertEqual(rset.rows, [[s2eid]]) - # check that when a relation definition is deleted, existing relations are deleted - rdefeid = self.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, ' - ' X from_entity E, X to_entity E ' - 'WHERE RT name "concerne2", E name "CWUser"')[0][0] - self._set_perms(rdefeid) - self.commit() - self.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}) - self.commit() - self.assertTrue('concerne2' in schema['CWUser'].subject_relations()) - self.assertFalse('concerne2' in schema['Societe2'].subject_relations()) - self.assertFalse(self.execute('Any X WHERE X concerne2 Y')) - # schema should be cleaned on delete (after commit) - self.execute('DELETE CWEType X WHERE X name "Societe2"') - self.execute('DELETE CWRType X WHERE X name "concerne2"') - self.assertTrue(self.index_exists('Societe2', 'name')) - self.assertTrue(schema.has_entity('Societe2')) - self.assertTrue(schema.has_relation('concerne2')) - self.commit() - self.assertFalse(self.index_exists('Societe2', 'name')) - self.assertFalse(schema.has_entity('Societe2')) - self.assertFalse(schema.has_entity('concerne2')) - self.assertFalse('concerne2' in schema['CWUser'].subject_relations()) + with self.admin_access.repo_cnx() as cnx: + schema = self.repo.schema + self.assertFalse(schema.has_entity('Societe2')) + self.assertFalse(schema.has_entity('concerne2')) + # schema should be updated on insertion (after commit) + eeid = cnx.execute('INSERT CWEType X: X name "Societe2", ' + 'X description "", X final FALSE')[0][0] + self._set_perms(cnx, eeid) + cnx.execute('INSERT CWRType X: X name "concerne2", X description "", ' + 'X final FALSE, X symmetric FALSE') + self.assertFalse(schema.has_entity('Societe2')) + self.assertFalse(schema.has_entity('concerne2')) + # have to commit before adding definition relations + cnx.commit() + self.assertTrue(schema.has_entity('Societe2')) + self.assertTrue(schema.has_relation('concerne2')) + attreid = cnx.execute('INSERT CWAttribute X: X cardinality "11", ' + 'X defaultval %(default)s, X indexed TRUE, ' + 'X relation_type RT, X from_entity E, X to_entity F ' + 'WHERE RT name "name", E name "Societe2", ' + 'F name "String"',
{'default': Binary.zpickle('noname')})[0][0] + self._set_attr_perms(cnx, attreid) + concerne2_rdef_eid = cnx.execute( + 'INSERT CWRelation X: X cardinality "**", X relation_type RT, ' + 'X from_entity E, X to_entity E ' + 'WHERE RT name "concerne2", E name "Societe2"')[0][0] + self._set_perms(cnx, concerne2_rdef_eid) + self.assertNotIn('name', schema['Societe2'].subject_relations()) + self.assertNotIn('concerne2', schema['Societe2'].subject_relations()) + self.assertFalse(self.index_exists(cnx, 'Societe2', 'name')) + cnx.commit() + self.assertIn('name', schema['Societe2'].subject_relations()) + self.assertIn('concerne2', schema['Societe2'].subject_relations()) + self.assertTrue(self.index_exists(cnx, 'Societe2', 'name')) + # now we should be able to insert and query Societe2 + s2eid = cnx.execute('INSERT Societe2 X: X name "logilab"')[0][0] + cnx.execute('Societe2 X WHERE X name "logilab"') + cnx.execute('SET X concerne2 X WHERE X name "logilab"') + rset = cnx.execute('Any X WHERE X concerne2 Y') + self.assertEqual(rset.rows, [[s2eid]]) + # check that when a relation definition is deleted, existing relations are deleted + rdefeid = cnx.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, ' + ' X from_entity E, X to_entity E ' + 'WHERE RT name "concerne2", E name "CWUser"')[0][0] + self._set_perms(cnx, rdefeid) + cnx.commit() + cnx.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}) + cnx.commit() + self.assertIn('concerne2', schema['CWUser'].subject_relations()) + self.assertNotIn('concerne2', schema['Societe2'].subject_relations()) + self.assertFalse(cnx.execute('Any X WHERE X concerne2 Y')) + # schema should be cleaned on delete (after commit) + cnx.execute('DELETE CWEType X WHERE X name "Societe2"') + cnx.execute('DELETE CWRType X WHERE X name "concerne2"') + self.assertTrue(self.index_exists(cnx, 'Societe2', 'name')) + self.assertTrue(schema.has_entity('Societe2')) + self.assertTrue(schema.has_relation('concerne2')) + cnx.commit() + self.assertFalse(self.index_exists(cnx, 'Societe2', 'name')) + self.assertFalse(schema.has_entity('Societe2')) + self.assertFalse(schema.has_entity('concerne2')) + self.assertNotIn('concerne2', schema['CWUser'].subject_relations()) def test_metartype_with_nordefs(self): - META_RTYPES.add('custom_meta') - self.execute('INSERT CWRType X: X name "custom_meta", X description "", ' - 'X final FALSE, X symmetric FALSE') - self.commit() - eeid = self.execute('INSERT CWEType X: X name "NEWEtype", ' - 'X description "", X final FALSE')[0][0] - self._set_perms(eeid) - self.commit() - META_RTYPES.remove('custom_meta') + with self.admin_access.repo_cnx() as cnx: + META_RTYPES.add('custom_meta') + cnx.execute('INSERT CWRType X: X name "custom_meta", X description "", ' + 'X final FALSE, X symmetric FALSE') + cnx.commit() + eeid = cnx.execute('INSERT CWEType X: X name "NEWEtype", ' + 'X description "", X final FALSE')[0][0] + self._set_perms(cnx, eeid) + cnx.commit() + META_RTYPES.remove('custom_meta') def test_metartype_with_somerdefs(self): - META_RTYPES.add('custom_meta') - self.execute('INSERT CWRType X: X name "custom_meta", X description "", ' - 'X final FALSE, X symmetric FALSE') - self.commit() - rdefeid = self.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, ' - ' X from_entity E, X to_entity E ' - 'WHERE RT name "custom_meta", E name "CWUser"')[0][0] - self._set_perms(rdefeid) - self.commit() - eeid = self.execute('INSERT CWEType X: X name "NEWEtype", ' - 'X description "", X final FALSE')[0][0] - 
self._set_perms(eeid) - self.commit() - META_RTYPES.remove('custom_meta') + with self.admin_access.repo_cnx() as cnx: + META_RTYPES.add('custom_meta') + cnx.execute('INSERT CWRType X: X name "custom_meta", X description "", ' + 'X final FALSE, X symmetric FALSE') + cnx.commit() + rdefeid = cnx.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, ' + ' X from_entity E, X to_entity E ' + 'WHERE RT name "custom_meta", E name "CWUser"')[0][0] + self._set_perms(cnx, rdefeid) + cnx.commit() + eeid = cnx.execute('INSERT CWEType X: X name "NEWEtype", ' + 'X description "", X final FALSE')[0][0] + self._set_perms(cnx, eeid) + cnx.commit() + META_RTYPES.remove('custom_meta') def test_is_instance_of_insertions(self): - seid = self.execute('INSERT Transition T: T name "subdiv"')[0][0] - is_etypes = [etype for etype, in self.execute('Any ETN WHERE X eid %s, X is ET, ET name ETN' % seid)] - self.assertEqual(is_etypes, ['Transition']) - instanceof_etypes = [etype for etype, in self.execute('Any ETN WHERE X eid %s, X is_instance_of ET, ET name ETN' % seid)] - self.assertEqual(sorted(instanceof_etypes), ['BaseTransition', 'Transition']) - snames = [name for name, in self.execute('Any N WHERE S is BaseTransition, S name N')] - self.assertFalse('subdiv' in snames) - snames = [name for name, in self.execute('Any N WHERE S is_instance_of BaseTransition, S name N')] - self.assertTrue('subdiv' in snames) + with self.admin_access.repo_cnx() as cnx: + seid = cnx.execute('INSERT Transition T: T name "subdiv"')[0][0] + is_etypes = [etype for etype, in cnx.execute('Any ETN WHERE X eid %s, ' + 'X is ET, ET name ETN' % seid)] + self.assertEqual(is_etypes, ['Transition']) + instanceof_etypes = [etype + for etype, in cnx.execute('Any ETN WHERE X eid %s, ' + 'X is_instance_of ET, ET name ETN' + % seid)] + self.assertEqual(sorted(instanceof_etypes), ['BaseTransition', 'Transition']) + snames = [name for name, in cnx.execute('Any N WHERE S is BaseTransition, S name N')] + self.assertNotIn('subdiv', snames) + snames = [name for name, in cnx.execute('Any N WHERE S is_instance_of BaseTransition, ' + 'S name N')] + self.assertIn('subdiv', snames) def test_perms_synchronization_1(self): - schema = self.repo.schema - self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users'))) - self.assertTrue(self.execute('Any X, Y WHERE X is CWEType, X name "CWUser", Y is CWGroup, Y name "users"')[0]) - self.execute('DELETE X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"') - self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users', ))) - self.commit() - self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers',))) - self.execute('SET X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"') - self.commit() - self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users',))) + with self.admin_access.repo_cnx() as cnx: + schema = self.repo.schema + self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users'))) + self.assertTrue(cnx.execute('Any X, Y WHERE X is CWEType, X name "CWUser", ' + 'Y is CWGroup, Y name "users"')[0]) + cnx.execute('DELETE X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"') + self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users', ))) + cnx.commit() + self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers',))) + cnx.execute('SET X read_permission Y WHERE X is CWEType, ' + 'X name "CWUser", Y name "users"') + 
cnx.commit() + self.assertEqual(schema['CWUser'].get_groups('read'), + set(('managers', 'users',))) def test_perms_synchronization_2(self): - schema = self.repo.schema['in_group'].rdefs[('CWUser', 'CWGroup')] - self.assertEqual(schema.get_groups('read'), set(('managers', 'users', 'guests'))) - self.execute('DELETE X read_permission Y WHERE X relation_type RT, RT name "in_group", Y name "guests"') - self.assertEqual(schema.get_groups('read'), set(('managers', 'users', 'guests'))) - self.commit() - self.assertEqual(schema.get_groups('read'), set(('managers', 'users'))) - self.execute('SET X read_permission Y WHERE X relation_type RT, RT name "in_group", Y name "guests"') - self.assertEqual(schema.get_groups('read'), set(('managers', 'users'))) - self.commit() - self.assertEqual(schema.get_groups('read'), set(('managers', 'users', 'guests'))) + with self.admin_access.repo_cnx() as cnx: + schema = self.repo.schema['in_group'].rdefs[('CWUser', 'CWGroup')] + self.assertEqual(schema.get_groups('read'), + set(('managers', 'users', 'guests'))) + cnx.execute('DELETE X read_permission Y WHERE X relation_type RT, ' + 'RT name "in_group", Y name "guests"') + self.assertEqual(schema.get_groups('read'), + set(('managers', 'users', 'guests'))) + cnx.commit() + self.assertEqual(schema.get_groups('read'), + set(('managers', 'users'))) + cnx.execute('SET X read_permission Y WHERE X relation_type RT, ' + 'RT name "in_group", Y name "guests"') + self.assertEqual(schema.get_groups('read'), + set(('managers', 'users'))) + cnx.commit() + self.assertEqual(schema.get_groups('read'), + set(('managers', 'users', 'guests'))) def test_nonregr_user_edit_itself(self): - ueid = self.session.user.eid - groupeids = [eid for eid, in self.execute('CWGroup G WHERE G name in ("managers", "users")')] - self.execute('DELETE X in_group Y WHERE X eid %s' % ueid) - self.execute('SET X surname "toto" WHERE X eid %s' % ueid) - self.execute('SET X in_group Y WHERE X eid %s, Y name "managers"' % ueid) - self.commit() - eeid = self.execute('Any X WHERE X is CWEType, X name "CWEType"')[0][0] - self.execute('DELETE X read_permission Y WHERE X eid %s' % eeid) - self.execute('SET X final FALSE WHERE X eid %s' % eeid) - self.execute('SET X read_permission Y WHERE X eid %s, Y eid in (%s, %s)' - % (eeid, groupeids[0], groupeids[1])) - self.commit() - self.execute('Any X WHERE X is CWEType, X name "CWEType"') + with self.admin_access.repo_cnx() as cnx: + ueid = cnx.user.eid + groupeids = [eid for eid, in cnx.execute('CWGroup G WHERE G name ' + 'in ("managers", "users")')] + cnx.execute('DELETE X in_group Y WHERE X eid %s' % ueid) + cnx.execute('SET X surname "toto" WHERE X eid %s' % ueid) + cnx.execute('SET X in_group Y WHERE X eid %s, Y name "managers"' % ueid) + cnx.commit() + eeid = cnx.execute('Any X WHERE X is CWEType, X name "CWEType"')[0][0] + cnx.execute('DELETE X read_permission Y WHERE X eid %s' % eeid) + cnx.execute('SET X final FALSE WHERE X eid %s' % eeid) + cnx.execute('SET X read_permission Y WHERE X eid %s, Y eid in (%s, %s)' + % (eeid, groupeids[0], groupeids[1])) + cnx.commit() + cnx.execute('Any X WHERE X is CWEType, X name "CWEType"') # schema modification hooks tests ######################################### def test_uninline_relation(self): - self.session.set_cnxset() - dbhelper = self.session.cnxset.source('system').dbhelper - sqlcursor = self.session.cnxset['system'] - self.assertTrue(self.schema['state_of'].inlined) - try: - self.execute('SET X inlined FALSE WHERE X name "state_of"') - 
self.assertTrue(self.schema['state_of'].inlined) - self.commit() - self.assertFalse(self.schema['state_of'].inlined) - self.assertFalse(self.index_exists('State', 'state_of')) - rset = self.execute('Any X, Y WHERE X state_of Y') - self.assertEqual(len(rset), 2) # user states - except Exception: - import traceback - traceback.print_exc() - finally: - self.execute('SET X inlined TRUE WHERE X name "state_of"') - self.assertFalse(self.schema['state_of'].inlined) - self.commit() - self.assertTrue(self.schema['state_of'].inlined) - self.assertTrue(self.index_exists('State', 'state_of')) - rset = self.execute('Any X, Y WHERE X state_of Y') - self.assertEqual(len(rset), 2) + with self.admin_access.repo_cnx() as cnx: + try: + self.assertTrue(self.schema['state_of'].inlined) + cnx.execute('SET X inlined FALSE WHERE X name "state_of"') + self.assertTrue(self.schema['state_of'].inlined) + cnx.commit() + self.assertFalse(self.schema['state_of'].inlined) + self.assertFalse(self.index_exists(cnx, 'State', 'state_of')) + rset = cnx.execute('Any X, Y WHERE X state_of Y') + self.assertEqual(len(rset), 2) # user states + finally: + cnx.execute('SET X inlined TRUE WHERE X name "state_of"') + self.assertFalse(self.schema['state_of'].inlined) + cnx.commit() + self.assertTrue(self.schema['state_of'].inlined) + self.assertTrue(self.index_exists(cnx, 'State', 'state_of')) + rset = cnx.execute('Any X, Y WHERE X state_of Y') + self.assertEqual(len(rset), 2) def test_indexed_change(self): - self.session.set_cnxset() - dbhelper = self.session.cnxset.source('system').dbhelper - sqlcursor = self.session.cnxset['system'] - try: - self.execute('SET X indexed FALSE WHERE X relation_type R, R name "name"') - self.assertTrue(self.schema['name'].rdef('Workflow', 'String').indexed) - self.assertTrue(self.index_exists('Workflow', 'name')) - self.commit() - self.assertFalse(self.schema['name'].rdef('Workflow', 'String').indexed) - self.assertFalse(self.index_exists('Workflow', 'name')) - finally: - self.execute('SET X indexed TRUE WHERE X relation_type R, R name "name"') - self.assertFalse(self.schema['name'].rdef('Workflow', 'String').indexed) - self.assertFalse(self.index_exists('Workflow', 'name')) - self.commit() - self.assertTrue(self.schema['name'].rdef('Workflow', 'String').indexed) - self.assertTrue(self.index_exists('Workflow', 'name')) + with self.admin_access.repo_cnx() as cnx: + try: + cnx.execute('SET X indexed FALSE WHERE X relation_type R, R name "name"') + self.assertTrue(self.schema['name'].rdef('Workflow', 'String').indexed) + self.assertTrue(self.index_exists(cnx, 'Workflow', 'name')) + cnx.commit() + self.assertFalse(self.schema['name'].rdef('Workflow', 'String').indexed) + self.assertFalse(self.index_exists(cnx, 'Workflow', 'name')) + finally: + cnx.execute('SET X indexed TRUE WHERE X relation_type R, R name "name"') + self.assertFalse(self.schema['name'].rdef('Workflow', 'String').indexed) + self.assertFalse(self.index_exists(cnx, 'Workflow', 'name')) + cnx.commit() + self.assertTrue(self.schema['name'].rdef('Workflow', 'String').indexed) + self.assertTrue(self.index_exists(cnx, 'Workflow', 'name')) def test_unique_change(self): - self.session.set_cnxset() - dbhelper = self.session.cnxset.source('system').dbhelper - sqlcursor = self.session.cnxset['system'] - try: - self.execute('INSERT CWConstraint X: X cstrtype CT, DEF constrained_by X ' - 'WHERE CT name "UniqueConstraint", DEF relation_type RT, DEF from_entity E,' - 'RT name "name", E name "Workflow"') - 
self.assertFalse(self.schema['Workflow'].has_unique_values('name')) - self.assertFalse(self.index_exists('Workflow', 'name', unique=True)) - self.commit() - self.assertTrue(self.schema['Workflow'].has_unique_values('name')) - self.assertTrue(self.index_exists('Workflow', 'name', unique=True)) - finally: - self.execute('DELETE DEF constrained_by X WHERE X cstrtype CT, ' - 'CT name "UniqueConstraint", DEF relation_type RT, DEF from_entity E,' - 'RT name "name", E name "Workflow"') - self.assertTrue(self.schema['Workflow'].has_unique_values('name')) - self.assertTrue(self.index_exists('Workflow', 'name', unique=True)) - self.commit() - self.assertFalse(self.schema['Workflow'].has_unique_values('name')) - self.assertFalse(self.index_exists('Workflow', 'name', unique=True)) + with self.admin_access.repo_cnx() as cnx: + try: + eid = cnx.execute('INSERT CWConstraint X: X cstrtype CT, DEF constrained_by X ' + 'WHERE CT name "UniqueConstraint", DEF relation_type RT, ' + 'DEF from_entity E, RT name "name", ' + 'E name "Workflow"').rows[0][0] + self.assertFalse(self.schema['Workflow'].has_unique_values('name')) + self.assertFalse(self.index_exists(cnx, 'Workflow', 'name', unique=True)) + cnx.commit() + self.assertTrue(self.schema['Workflow'].has_unique_values('name')) + self.assertTrue(self.index_exists(cnx, 'Workflow', 'name', unique=True)) + finally: + cnx.execute('DELETE CWConstraint C WHERE C eid %(eid)s', {'eid': eid}) + cnx.commit() + self.assertFalse(self.schema['Workflow'].has_unique_values('name')) + self.assertFalse(self.index_exists(cnx, 'Workflow', 'name', unique=True)) def test_required_change_1(self): - self.execute('SET DEF cardinality "?1" ' - 'WHERE DEF relation_type RT, DEF from_entity E,' - 'RT name "title", E name "Bookmark"') - self.commit() - # should now be able to add bookmark without title - self.execute('INSERT Bookmark X: X path "/view"') - self.commit() + with self.admin_access.repo_cnx() as cnx: + cnx.execute('SET DEF cardinality "?1" ' + 'WHERE DEF relation_type RT, DEF from_entity E,' + 'RT name "title", E name "Bookmark"') + cnx.commit() + # should now be able to add bookmark without title + cnx.execute('INSERT Bookmark X: X path "/view"') + cnx.commit() def test_required_change_2(self): - self.execute('SET DEF cardinality "11" ' - 'WHERE DEF relation_type RT, DEF from_entity E,' - 'RT name "surname", E name "CWUser"') - self.commit() - # should not be able anymore to add cwuser without surname - req = self.request() - self.assertRaises(ValidationError, self.create_user, req, "toto") - self.rollback() - self.execute('SET DEF cardinality "?1" ' - 'WHERE DEF relation_type RT, DEF from_entity E,' - 'RT name "surname", E name "CWUser"') - self.commit() - + with self.admin_access.repo_cnx() as cnx: + cnx.execute('SET DEF cardinality "11" ' + 'WHERE DEF relation_type RT, DEF from_entity E,' + 'RT name "surname", E name "CWUser"') + cnx.commit() + # should not be able anymore to add cwuser without surname + self.assertRaises(ValidationError, self.create_user, cnx, "toto") + cnx.rollback() + cnx.execute('SET DEF cardinality "?1" ' + 'WHERE DEF relation_type RT, DEF from_entity E,' + 'RT name "surname", E name "CWUser"') + cnx.commit() def test_add_attribute_to_base_class(self): - attreid = self.execute('INSERT CWAttribute X: X cardinality "11", X defaultval %(default)s, ' - 'X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F ' - 'WHERE RT name "messageid", E name "BaseTransition", F name "String"', - {'default': Binary.zpickle('noname')})[0][0] - assert 
self.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"', - {'x': attreid}) - self.commit() - self.schema.rebuild_infered_relations() - self.assertTrue('Transition' in self.schema['messageid'].subjects()) - self.assertTrue('WorkflowTransition' in self.schema['messageid'].subjects()) - self.execute('Any X WHERE X is_instance_of BaseTransition, X messageid "hop"') + with self.admin_access.repo_cnx() as cnx: + attreid = cnx.execute('INSERT CWAttribute X: X cardinality "11", X defaultval %(default)s, ' + 'X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F ' + 'WHERE RT name "messageid", E name "BaseTransition", F name "String"', + {'default': Binary.zpickle('noname')})[0][0] + assert cnx.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"', + {'x': attreid}) + cnx.commit() + self.schema.rebuild_infered_relations() + self.assertIn('Transition', self.schema['messageid'].subjects()) + self.assertIn('WorkflowTransition', self.schema['messageid'].subjects()) + cnx.execute('Any X WHERE X is_instance_of BaseTransition, X messageid "hop"') def test_change_fulltextindexed(self): - req = self.request() - target = req.create_entity(u'Email', messageid=u'1234', - subject=u'rick.roll@dance.com') - self.commit() - rset = req.execute('Any X WHERE X has_text "rick.roll"') - self.assertIn(target.eid, [item[0] for item in rset]) - assert req.execute('SET A fulltextindexed FALSE ' - 'WHERE E is CWEType, E name "Email", A is CWAttribute,' - 'A from_entity E, A relation_type R, R name "subject"') - self.commit() - rset = req.execute('Any X WHERE X has_text "rick.roll"') - self.assertFalse(rset) - assert req.execute('SET A fulltextindexed TRUE ' - 'WHERE A from_entity E, A relation_type R, ' - 'E name "Email", R name "subject"') - self.commit() - rset = req.execute('Any X WHERE X has_text "rick.roll"') - self.assertIn(target.eid, [item[0] for item in rset]) + with self.admin_access.repo_cnx() as cnx: + target = cnx.create_entity(u'Email', messageid=u'1234', + subject=u'rick.roll@dance.com') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertIn(target.eid, [item[0] for item in rset]) + assert cnx.execute('SET A fulltextindexed FALSE ' + 'WHERE E is CWEType, E name "Email", A is CWAttribute,' + 'A from_entity E, A relation_type R, R name "subject"') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertFalse(rset) + assert cnx.execute('SET A fulltextindexed TRUE ' + 'WHERE A from_entity E, A relation_type R, ' + 'E name "Email", R name "subject"') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertIn(target.eid, [item[0] for item in rset]) def test_change_fulltext_container(self): - req = self.request() - target = req.create_entity(u'EmailAddress', address=u'rick.roll@dance.com') - target.cw_set(reverse_use_email=req.user) - self.commit() - rset = req.execute('Any X WHERE X has_text "rick.roll"') - self.assertIn(req.user.eid, [item[0] for item in rset]) - assert self.execute('SET R fulltext_container NULL ' - 'WHERE R name "use_email"') - self.commit() - rset = self.execute('Any X WHERE X has_text "rick.roll"') - self.assertIn(target.eid, [item[0] for item in rset]) - assert self.execute('SET R fulltext_container "subject" ' - 'WHERE R name "use_email"') - self.commit() - rset = req.execute('Any X WHERE X has_text "rick.roll"') - self.assertIn(req.user.eid, [item[0] for item in rset]) + with self.admin_access.repo_cnx() as cnx: + target = 
cnx.create_entity(u'EmailAddress', address=u'rick.roll@dance.com') + target.cw_set(reverse_use_email=cnx.user) + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertIn(cnx.user.eid, [item[0] for item in rset]) + assert cnx.execute('SET R fulltext_container NULL ' + 'WHERE R name "use_email"') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertIn(target.eid, [item[0] for item in rset]) + assert cnx.execute('SET R fulltext_container "subject" ' + 'WHERE R name "use_email"') + cnx.commit() + rset = cnx.execute('Any X WHERE X has_text "rick.roll"') + self.assertIn(cnx.user.eid, [item[0] for item in rset]) def test_update_constraint(self): - rdef = self.schema['Transition'].rdef('type') - cstr = rdef.constraint_by_type('StaticVocabularyConstraint') - if not getattr(cstr, 'eid', None): - self.skipTest('start me alone') # bug in schema reloading, constraint's eid not restored - self.execute('SET X value %(v)s WHERE X eid %(x)s', - {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"}) - self.execute('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' - 'WHERE CT name %(ct)s, EDEF eid %(x)s', - {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid}) - self.commit() - cstr = rdef.constraint_by_type('StaticVocabularyConstraint') - self.assertEqual(cstr.values, (u'normal', u'auto', u'new')) - self.execute('INSERT Transition T: T name "hop", T type "new"') + with self.admin_access.repo_cnx() as cnx: + rdef = self.schema['Transition'].rdef('type') + cstr = rdef.constraint_by_type('StaticVocabularyConstraint') + if not getattr(cstr, 'eid', None): + # bug in schema reloading, constraint's eid not restored + self.skipTest('start me alone') + cnx.execute('SET X value %(v)s WHERE X eid %(x)s', + {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"}) + cnx.execute('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, ' + 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s', + {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid}) + cnx.commit() + cstr = rdef.constraint_by_type('StaticVocabularyConstraint') + self.assertEqual(cstr.values, (u'normal', u'auto', u'new')) + cnx.execute('INSERT Transition T: T name "hop", T type "new"') if __name__ == '__main__': unittest_main() diff -r 84738d495ffd -r 793377697c81 hooks/test/unittest_syncsession.py --- a/hooks/test/unittest_syncsession.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/test/unittest_syncsession.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
@@ -28,32 +28,46 @@ class CWPropertyHooksTC(CubicWebTC): def test_unexistant_cwproperty(self): - with self.assertRaises(ValidationError) as cm: - self.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop", X for_user U') - cm.exception.translate(unicode) - self.assertEqual(cm.exception.errors, {'pkey-subject': 'unknown property key bla.bla'}) - with self.assertRaises(ValidationError) as cm: - self.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop"') - cm.exception.translate(unicode) - self.assertEqual(cm.exception.errors, {'pkey-subject': 'unknown property key bla.bla'}) + with self.admin_access.web_request() as req: + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "bla.bla", ' + 'X value "hop", X for_user U') + cm.exception.translate(unicode) + self.assertEqual(cm.exception.errors, + {'pkey-subject': 'unknown property key bla.bla'}) + + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop"') + cm.exception.translate(unicode) + self.assertEqual(cm.exception.errors, + {'pkey-subject': 'unknown property key bla.bla'}) def test_site_wide_cwproperty(self): - with self.assertRaises(ValidationError) as cm: - self.execute('INSERT CWProperty X: X pkey "ui.site-title", X value "hop", X for_user U') - self.assertEqual(cm.exception.errors, {'for_user-subject': "site-wide property can't be set for user"}) + with self.admin_access.web_request() as req: + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "ui.site-title", ' + 'X value "hop", X for_user U') + self.assertEqual(cm.exception.errors, + {'for_user-subject': "site-wide property can't be set for user"}) def test_system_cwproperty(self): - with self.assertRaises(ValidationError) as cm: - self.execute('INSERT CWProperty X: X pkey "system.version.cubicweb", X value "hop", X for_user U') - self.assertEqual(cm.exception.errors, {'for_user-subject': "site-wide property can't be set for user"}) + with self.admin_access.web_request() as req: + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "system.version.cubicweb", ' + 'X value "hop", X for_user U') + self.assertEqual(cm.exception.errors, + {'for_user-subject': "site-wide property can't be set for user"}) def test_bad_type_cwproperty(self): - with self.assertRaises(ValidationError) as cm: - self.execute('INSERT CWProperty X: X pkey "ui.language", X value "hop", X for_user U') - self.assertEqual(cm.exception.errors, {'value-subject': u'unauthorized value'}) - with self.assertRaises(ValidationError) as cm: - self.execute('INSERT CWProperty X: X pkey "ui.language", X value "hop"') - self.assertEqual(cm.exception.errors, {'value-subject': u'unauthorized value'}) + with self.admin_access.web_request() as req: + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "ui.language", ' + 'X value "hop", X for_user U') + self.assertEqual(cm.exception.errors, + {'value-subject': u'unauthorized value'}) + with self.assertRaises(ValidationError) as cm: + req.execute('INSERT CWProperty X: X pkey "ui.language", X value "hop"') + self.assertEqual(cm.exception.errors, {'value-subject': u'unauthorized value'}) if __name__ == '__main__': from logilab.common.testlib import unittest_main diff -r 84738d495ffd -r 793377697c81 hooks/workflow.py --- a/hooks/workflow.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/workflow.py Wed Sep 24 18:04:30 2014 +0200 @@ -28,17 +28,13 @@ from 
cubicweb.server import hook -def _change_state(session, x, oldstate, newstate): - nocheck = session.transaction_data.setdefault('skip-security', set()) +def _change_state(cnx, x, oldstate, newstate): + nocheck = cnx.transaction_data.setdefault('skip-security', set()) nocheck.add((x, 'in_state', oldstate)) nocheck.add((x, 'in_state', newstate)) - # delete previous state first unless in_state isn't stored in the system - # source - fromsource = session.describe(x)[1] - if fromsource == 'system' or \ - not session.repo.sources_by_uri[fromsource].support_relation('in_state'): - session.delete_relation(x, 'in_state', oldstate) - session.add_relation(x, 'in_state', newstate) + # delete previous state first + cnx.delete_relation(x, 'in_state', oldstate) + cnx.add_relation(x, 'in_state', newstate) # operations ################################################################### @@ -48,17 +44,17 @@ entity = None # make pylint happy def precommit_event(self): - session = self.session + cnx = self.cnx entity = self.entity iworkflowable = entity.cw_adapt_to('IWorkflowable') # if there is an initial state and the entity's state is not set, # use the initial state as a default state - if not (session.deleted_in_transaction(entity.eid) or entity.in_state) \ + if not (cnx.deleted_in_transaction(entity.eid) or entity.in_state) \ and iworkflowable.current_workflow: state = iworkflowable.current_workflow.initial if state: - session.add_relation(entity.eid, 'in_state', state.eid) - _FireAutotransitionOp(session, entity=entity) + cnx.add_relation(entity.eid, 'in_state', state.eid) + _FireAutotransitionOp(cnx, entity=entity) class _FireAutotransitionOp(hook.Operation): """try to fire auto transition after state changes""" @@ -80,11 +76,11 @@ def precommit_event(self): # notice that enforcement that new workflow apply to the entity's type is # done by schema rule, no need to check it here - session = self.session - pendingeids = session.transaction_data.get('pendingeids', ()) + cnx = self.cnx + pendingeids = cnx.transaction_data.get('pendingeids', ()) if self.eid in pendingeids: return - entity = session.entity_from_eid(self.eid) + entity = cnx.entity_from_eid(self.eid) iworkflowable = entity.cw_adapt_to('IWorkflowable') # check custom workflow has not been rechanged to another one in the same # transaction @@ -100,13 +96,13 @@ # if there are no history, simply go to new workflow's initial state if not iworkflowable.workflow_history: if iworkflowable.current_state.eid != deststate.eid: - _change_state(session, entity.eid, + _change_state(cnx, entity.eid, iworkflowable.current_state.eid, deststate.eid) - _FireAutotransitionOp(session, entity=entity) + _FireAutotransitionOp(cnx, entity=entity) return - msg = session._('workflow changed to "%s"') - msg %= session._(mainwf.name) - session.transaction_data[(entity.eid, 'customwf')] = self.wfeid + msg = cnx._('workflow changed to "%s"') + msg %= cnx._(mainwf.name) + cnx.transaction_data[(entity.eid, 'customwf')] = self.wfeid iworkflowable.change_state(deststate, msg, u'text/plain') @@ -114,7 +110,7 @@ treid = None # make pylint happy def precommit_event(self): - tr = self.session.entity_from_eid(self.treid) + tr = self.cnx.entity_from_eid(self.treid) outputs = set() for ep in tr.subworkflow_exit: if ep.subwf_state.eid in outputs: @@ -127,7 +123,7 @@ forentity = trinfo = None # make pylint happy def precommit_event(self): - session = self.session + cnx = self.cnx forentity = self.forentity iworkflowable = forentity.cw_adapt_to('IWorkflowable') trinfo = self.trinfo @@ 
-141,8 +137,8 @@ if tostate is not None: # reached an exit point msg = _('exiting from subworkflow %s') - msg %= session._(iworkflowable.current_workflow.name) - session.transaction_data[(forentity.eid, 'subwfentrytr')] = True + msg %= cnx._(iworkflowable.current_workflow.name) + cnx.transaction_data[(forentity.eid, 'subwfentrytr')] = True iworkflowable.change_state(tostate, msg, u'text/plain', tr=wftr) @@ -177,7 +173,7 @@ events = ('before_add_entity',) def __call__(self): - session = self._cw + cnx = self._cw entity = self.entity # first retreive entity to which the state change apply try: @@ -185,15 +181,15 @@ except KeyError: msg = _('mandatory relation') raise validation_error(entity, {('wf_info_for', 'subject'): msg}) - forentity = session.entity_from_eid(foreid) + forentity = cnx.entity_from_eid(foreid) # see comment in the TrInfo entity definition entity.cw_edited['tr_count']=len(forentity.reverse_wf_info_for) iworkflowable = forentity.cw_adapt_to('IWorkflowable') # then check it has a workflow set, unless we're in the process of changing # entity's workflow - if session.transaction_data.get((forentity.eid, 'customwf')): - wfeid = session.transaction_data[(forentity.eid, 'customwf')] - wf = session.entity_from_eid(wfeid) + if cnx.transaction_data.get((forentity.eid, 'customwf')): + wfeid = cnx.transaction_data[(forentity.eid, 'customwf')] + wf = cnx.entity_from_eid(wfeid) else: wf = iworkflowable.current_workflow if wf is None: @@ -205,9 +201,9 @@ msg = _('related entity has no state') raise validation_error(entity, {None: msg}) # True if we are coming back from subworkflow - swtr = session.transaction_data.pop((forentity.eid, 'subwfentrytr'), None) - cowpowers = (session.user.is_in_group('managers') - or not session.write_security) + swtr = cnx.transaction_data.pop((forentity.eid, 'subwfentrytr'), None) + cowpowers = (cnx.user.is_in_group('managers') + or not cnx.write_security) # no investigate the requested state change... 
try: treid = entity.cw_attr_cache['by_transition'] @@ -228,7 +224,7 @@ else: # check transition is valid and allowed, unless we're coming back # from subworkflow - tr = session.entity_from_eid(treid) + tr = cnx.entity_from_eid(treid) if swtr is None: qname = ('by_transition', 'subject') if tr is None: @@ -247,7 +243,7 @@ msg = _("transition isn't allowed") raise validation_error(entity, {('by_transition', 'subject'): msg}) if swtr is None: - deststate = session.entity_from_eid(deststateeid) + deststate = cnx.entity_from_eid(deststateeid) if not cowpowers and deststate is None: msg = _("state doesn't belong to entity's workflow") raise validation_error(entity, {('to_state', 'subject'): msg}) @@ -256,10 +252,10 @@ # everything is ok, add missing information on the trinfo entity entity.cw_edited['from_state'] = fromstate.eid entity.cw_edited['to_state'] = deststateeid - nocheck = session.transaction_data.setdefault('skip-security', set()) + nocheck = cnx.transaction_data.setdefault('skip-security', set()) nocheck.add((entity.eid, 'from_state', fromstate.eid)) nocheck.add((entity.eid, 'to_state', deststateeid)) - _FireAutotransitionOp(session, entity=forentity) + _FireAutotransitionOp(cnx, entity=forentity) class FiredTransitionHook(WorkflowHook): @@ -289,12 +285,12 @@ category = 'integrity' def __call__(self): - session = self._cw - nocheck = session.transaction_data.get('skip-security', ()) + cnx = self._cw + nocheck = cnx.transaction_data.get('skip-security', ()) if (self.eidfrom, 'in_state', self.eidto) in nocheck: # state changed through TrInfo insertion, so we already know it's ok return - entity = session.entity_from_eid(self.eidfrom) + entity = cnx.entity_from_eid(self.eidfrom) iworkflowable = entity.cw_adapt_to('IWorkflowable') mainwf = iworkflowable.main_workflow if mainwf is None: diff -r 84738d495ffd -r 793377697c81 hooks/zmq.py --- a/hooks/zmq.py Wed Sep 24 17:35:59 2014 +0200 +++ b/hooks/zmq.py Wed Sep 24 18:04:30 2014 +0200 @@ -71,6 +71,7 @@ address = config.get('zmq-repository-address') if not address: return + self.repo.warning('remote access to the repository via zmq/pickle is deprecated') from cubicweb.server import cwzmq self.repo.zmq_repo_server = server = cwzmq.ZMQRepositoryServer(self.repo) server.connect(address) diff -r 84738d495ffd -r 793377697c81 i18n/de.po --- a/i18n/de.po Wed Sep 24 17:35:59 2014 +0200 +++ b/i18n/de.po Wed Sep 24 18:04:30 2014 +0200 @@ -118,10 +118,6 @@ msgstr "" #, python-format -msgid "%s relation should not be in mapped" -msgstr "" - -#, python-format msgid "%s software version of the database" msgstr "Software-Version der Datenbank %s" @@ -1276,9 +1272,6 @@ msgid "attribute" msgstr "Attribut" -msgid "attribute/relation can't be mapped, only entity and relation types" -msgstr "" - msgid "august" msgstr "August" @@ -1401,22 +1394,12 @@ msgstr "" #, python-format -msgid "can't connect to source %s, some data may be missing" -msgstr "Keine Verbindung zu der Quelle %s, einige Daten könnten fehlen" - -#, python-format msgid "can't display data, unexpected error: %s" msgstr "Kann die Daten aufgrund des folgenden Fehlers nicht anzeigen: %s" msgid "can't have multiple exits on the same state" msgstr "Mehrere Ausgänge aus demselben Zustand nicht möglich." 
-msgid "can't mix dontcross and maycross options" -msgstr "" - -msgid "can't mix dontcross and write options" -msgstr "" - #, python-format msgid "can't parse %(value)r (expected %(format)s)" msgstr "" @@ -2426,12 +2409,6 @@ msgid "error while publishing ReST text" msgstr "Fehler beim Ãœbersetzen von reST" -#, python-format -msgid "error while querying source %s, some data may be missing" -msgstr "" -"Fehler beim Zugriff auf Quelle %s, möglicherweise sind die Daten " -"unvollständig." - msgid "exit state must be a subworkflow state" msgstr "Exit-Zustand muss ein Subworkflow-Zustand sein." @@ -2906,10 +2883,6 @@ msgid "inlined" msgstr "eingereiht" -#, python-format -msgid "inlined relation %(rtype)s of %(etype)s should be supported" -msgstr "" - msgid "instance home" msgstr "Startseite der Instanz" @@ -3297,10 +3270,6 @@ msgid "no edited fields specified" msgstr "" -#, python-format -msgid "no edited fields specified for entity %s" -msgstr "kein Eingabefeld spezifiziert Für Entität %s" - msgid "no log to display" msgstr "" @@ -4275,10 +4244,6 @@ msgstr "(Externe) Entität nicht gefunden" #, python-format -msgid "unknown option(s): %s" -msgstr "" - -#, python-format msgid "unknown options %s" msgstr "" @@ -4585,13 +4550,16 @@ msgid "you have been logged out" msgstr "Sie sind jetzt abgemeldet." -#, python-format -msgid "you may want to specify something for %s" -msgstr "" - msgid "you should probably delete that property" msgstr "Sie sollten diese Eigenschaft wahrscheinlich löschen." -#, python-format -msgid "you should un-inline relation %s which is supported and may be crossed " -msgstr "" +#~ msgid "can't connect to source %s, some data may be missing" +#~ msgstr "Keine Verbindung zu der Quelle %s, einige Daten könnten fehlen" + +#~ msgid "error while querying source %s, some data may be missing" +#~ msgstr "" +#~ "Fehler beim Zugriff auf Quelle %s, möglicherweise sind die Daten " +#~ "unvollständig." 
+ +#~ msgid "no edited fields specified for entity %s" +#~ msgstr "kein Eingabefeld spezifiziert Für Entität %s" diff -r 84738d495ffd -r 793377697c81 i18n/en.po --- a/i18n/en.po Wed Sep 24 17:35:59 2014 +0200 +++ b/i18n/en.po Wed Sep 24 18:04:30 2014 +0200 @@ -110,10 +110,6 @@ msgstr "" #, python-format -msgid "%s relation should not be in mapped" -msgstr "" - -#, python-format msgid "%s software version of the database" msgstr "" @@ -1233,9 +1229,6 @@ msgid "attribute" msgstr "" -msgid "attribute/relation can't be mapped, only entity and relation types" -msgstr "" - msgid "august" msgstr "" @@ -1358,22 +1351,12 @@ msgstr "" #, python-format -msgid "can't connect to source %s, some data may be missing" -msgstr "" - -#, python-format msgid "can't display data, unexpected error: %s" msgstr "" msgid "can't have multiple exits on the same state" msgstr "" -msgid "can't mix dontcross and maycross options" -msgstr "" - -msgid "can't mix dontcross and write options" -msgstr "" - #, python-format msgid "can't parse %(value)r (expected %(format)s)" msgstr "" @@ -2375,10 +2358,6 @@ msgid "error while publishing ReST text" msgstr "" -#, python-format -msgid "error while querying source %s, some data may be missing" -msgstr "" - msgid "exit state must be a subworkflow state" msgstr "" @@ -2833,10 +2812,6 @@ msgid "inlined" msgstr "inlined" -#, python-format -msgid "inlined relation %(rtype)s of %(etype)s should be supported" -msgstr "" - msgid "instance home" msgstr "" @@ -3213,10 +3188,6 @@ msgid "no edited fields specified" msgstr "" -#, python-format -msgid "no edited fields specified for entity %s" -msgstr "" - msgid "no log to display" msgstr "" @@ -4175,10 +4146,6 @@ msgstr "" #, python-format -msgid "unknown option(s): %s" -msgstr "" - -#, python-format msgid "unknown options %s" msgstr "" @@ -4472,13 +4439,5 @@ msgid "you have been logged out" msgstr "" -#, python-format -msgid "you may want to specify something for %s" -msgstr "" - msgid "you should probably delete that property" msgstr "" - -#, python-format -msgid "you should un-inline relation %s which is supported and may be crossed " -msgstr "" diff -r 84738d495ffd -r 793377697c81 i18n/es.po --- a/i18n/es.po Wed Sep 24 17:35:59 2014 +0200 +++ b/i18n/es.po Wed Sep 24 18:04:30 2014 +0200 @@ -1,17 +1,22 @@ # cubicweb i18n catalog -# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# Copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
# Logilab +# Translators: +# CreaLibre , 2014 msgid "" msgstr "" -"Project-Id-Version: cubicweb 2.46.0\n" -"POT-Creation-Date: \n" -"PO-Revision-Date: 2011-05-03 12:53-0600\n" +"Project-Id-Version: Cubicweb\n" +"POT-Creation-Date: 2006-01-12 17:35+CET\n" +"PO-Revision-Date: 2014-03-04 08:10+0000\n" "Last-Translator: CreaLibre \n" -"Language-Team: es \n" -"Language: \n" +"Language-Team: Spanish (http://www.transifex.com/projects/p/cubicweb/" +"language/es/)\n" +"Language: es\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" #, python-format msgid "" @@ -26,7 +31,7 @@ msgstr "" "\n" "%(user)s ha cambiado su estado de <%(previous_state)s> hacia <" -"%(current_state)s> por la entidad\n" +"%(current_state)s> en la entidad\n" "'%(title)s'\n" "\n" "%(comment)s\n" @@ -52,11 +57,11 @@ #, python-format msgid "%(KEY-cstr)s constraint failed for value %(KEY-value)r" -msgstr "" +msgstr "%(KEY-cstr)s restricción errónea para el valor %(KEY-value)r" #, python-format msgid "%(KEY-value)r doesn't match the %(KEY-regexp)r regular expression" -msgstr "" +msgstr "%(KEY-value)r no corresponde a la expresión regular %(KEY-regexp)r" #, python-format msgid "%(attr)s set to %(newvalue)s" @@ -116,11 +121,7 @@ #, python-format msgid "%s is part of violated unicity constraint" -msgstr "" - -#, python-format -msgid "%s relation should not be in mapped" -msgstr "la relación %s no debería estar mapeada" +msgstr "%s pertenece a una restricción de unidad no respectada" #, python-format msgid "%s software version of the database" @@ -138,6 +139,8 @@ msgid "" "'%s' action for in_state relation should at least have 'linkattr=name' option" msgstr "" +"'%s' acción en la relación in_state debe por lo menos tener la opción " +"'linkattr=name'" #, python-format msgid "'%s' action requires 'linkattr' option" @@ -148,7 +151,7 @@ #, python-format msgid "(suppressed) entity #%d" -msgstr "" +msgstr "(eliminada) entidad #%d" msgid "**" msgstr "0..n 0..n" @@ -200,7 +203,7 @@ "div>" msgid "" -msgstr "" +msgstr "" msgid "" msgstr "" @@ -225,7 +228,7 @@ #, python-format msgid "Added relation : %(entity_from)s %(rtype)s %(entity_to)s" -msgstr "" +msgstr "Relación agregada : %(entity_from)s %(rtype)s %(entity_to)s" msgid "Any" msgstr "Cualquiera" @@ -243,10 +246,10 @@ msgstr "Transiciones (abstractas)" msgid "BigInt" -msgstr "" +msgstr "Big integer" msgid "BigInt_plural" -msgstr "" +msgstr "Big integers" msgid "Bookmark" msgstr "Favorito" @@ -274,7 +277,7 @@ #, python-format msgid "By %(user)s on %(dt)s [%(undo_link)s]" -msgstr "" +msgstr "Por %(user)s en %(dt)s [%(undo_link)s]" msgid "Bytes" msgstr "Bytes" @@ -307,10 +310,10 @@ msgstr "Restricciones" msgid "CWDataImport" -msgstr "" +msgstr "Importación de Datos" msgid "CWDataImport_plural" -msgstr "" +msgstr "Importaciones de Datos" msgid "CWEType" msgstr "Tipo de entidad" @@ -425,19 +428,21 @@ "tipo ya no existe" msgid "Click to sort on this column" -msgstr "" +msgstr "Seleccione para ordenar columna" msgid "" "Configuration of the system source goes to the 'sources' file, not in the " "database" msgstr "" +"La configuración de la fuente sistema va en el archivo \"Sources\"/Fuentes, " +"y no en la base de datos." 
#, python-format msgid "Created %(etype)s : %(entity)s" -msgstr "" +msgstr "Se creó %(etype)s : %(entity)s" msgid "DEBUG" -msgstr "" +msgstr "DEPURAR" msgid "Date" msgstr "Fecha" @@ -459,23 +464,23 @@ #, python-format msgid "Delete relation : %(entity_from)s %(rtype)s %(entity_to)s" -msgstr "" +msgstr "Eliminar relación : %(entity_from)s %(rtype)s %(entity_to)s" #, python-format msgid "Deleted %(etype)s : %(entity)s" -msgstr "" +msgstr "Se eliminó %(etype)s : %(entity)s" msgid "Detected problems" msgstr "Problemas detectados" msgid "Do you want to delete the following element(s)?" -msgstr "Desea eliminar el(los) elemento(s) siguiente(s)" +msgstr "Desea eliminar el/los elemento(s) a continuación?" msgid "Download schema as OWL" msgstr "Descargar el esquema en formato OWL" msgid "ERROR" -msgstr "" +msgstr "ERROR" msgid "EmailAddress" msgstr "Correo Electrónico" @@ -500,7 +505,7 @@ msgstr "Uris externos" msgid "FATAL" -msgstr "" +msgstr "FATAL" msgid "Float" msgstr "Número flotante" @@ -521,7 +526,7 @@ msgstr "Ayuda" msgid "INFO" -msgstr "" +msgstr "INFO" msgid "Instance" msgstr "Instancia" @@ -542,7 +547,7 @@ msgstr "Duraciones" msgid "Link:" -msgstr "" +msgstr "Liga:" msgid "Looked up classes" msgstr "Clases buscadas" @@ -554,7 +559,7 @@ msgstr "Gestión de seguridad" msgid "Message threshold" -msgstr "" +msgstr "Límite de mensajes" msgid "Most referenced classes" msgstr "Clases más referenciadas" @@ -578,7 +583,7 @@ msgstr "Agregar tipo de Restricción" msgid "New CWDataImport" -msgstr "" +msgstr "Nueva importación de datos" msgid "New CWEType" msgstr "Agregar tipo de entidad" @@ -795,7 +800,7 @@ msgstr "Este tipo de Restricción" msgid "This CWDataImport" -msgstr "" +msgstr "Esta importación de datos" msgid "This CWEType" msgstr "Este tipo de Entidad" @@ -858,6 +863,8 @@ "This action is forbidden. If you think it should be allowed, please contact " "the site administrator." msgstr "" +"Esta acción le es prohibida. Si cree que Ud. debería de tener autorización, " +"favor de contactar al administrador del sitio. " msgid "This entity type permissions:" msgstr "Permisos para este tipo de entidad:" @@ -886,23 +893,23 @@ "por línea." msgid "Undoable actions" -msgstr "" +msgstr "Acciones irreversibles" msgid "Undoing" -msgstr "" +msgstr "Deshaciendo" msgid "UniqueConstraint" msgstr "Restricción de Unicidad" msgid "Unknown source type" -msgstr "" +msgstr "tipo de fuente desconocida" msgid "Unreachable objects" msgstr "Objetos inaccesibles" #, python-format msgid "Updated %(etype)s : %(entity)s" -msgstr "" +msgstr "Se actualizó %(etype)s : %(entity)s" msgid "Used by:" msgstr "Utilizado por :" @@ -911,7 +918,7 @@ msgstr "Usuarios y grupos de administradores" msgid "WARNING" -msgstr "" +msgstr "ADVERTENCIA" msgid "Web server" msgstr "Servidor web" @@ -950,13 +957,13 @@ "en el campo siguiente." msgid "You can't change this relation" -msgstr "" +msgstr "Usted no puede modificar esta relación" msgid "You cannot remove the system source" -msgstr "" +msgstr "Usted no puede eliminar la fuente sistema" msgid "You cannot rename the system source" -msgstr "" +msgstr "Usted no puede Renombrar la fuente sistema" msgid "" "You have no access to this view or it can not be used to display the current " @@ -992,7 +999,7 @@ msgstr "un número flotante es requerido" msgid "a number (in seconds) or 20s, 10min, 24h or 4d are expected" -msgstr "" +msgstr "se espera un número (en segundos) ó 20s, 10min, 24h ó 4d " msgid "" "a simple cache entity characterized by a name and a validity date. 
The " @@ -1010,7 +1017,7 @@ msgstr "Clase de base abstracta para la transiciones" msgid "action menu" -msgstr "" +msgstr "acciones" msgid "action(s) on this selection" msgstr "Acción(es) en esta selección" @@ -1031,7 +1038,7 @@ msgstr "Agregar a los favoritos " msgid "add CWAttribute add_permission RQLExpression subject" -msgstr "" +msgstr "Expresión RQL de agregación" msgid "add CWAttribute constrained_by CWConstraint subject" msgstr "Restricción" @@ -1143,7 +1150,7 @@ msgctxt "CWAttribute" msgid "add_permission" -msgstr "" +msgstr "Permiso de agregar" # subject and object forms for each relation type # (no object form for final relation types) @@ -1182,7 +1189,7 @@ "ha sido agregada" msgid "additional type specific properties" -msgstr "" +msgstr "propiedades adicionales específicas al tipo" msgid "addrelated" msgstr "Agregar" @@ -1212,7 +1219,7 @@ #, python-format msgid "allowed values for \"action\" are %s" -msgstr "" +msgstr "los valores permitidos por \"acción\" son %s" msgid "allowed_transition" msgstr "transiciones autorizadas" @@ -1261,7 +1268,7 @@ msgstr "anónimo" msgid "anyrsetview" -msgstr "" +msgstr "vistas rset" msgid "april" msgstr "Abril" @@ -1283,11 +1290,6 @@ msgid "attribute" msgstr "Atributo" -msgid "attribute/relation can't be mapped, only entity and relation types" -msgstr "" -"los atributos y las relaciones no pueden ser mapeados, solamente los tipos " -"de entidad y de relación" - msgid "august" msgstr "Agosto" @@ -1299,19 +1301,20 @@ msgid "autocomputed attribute used to ensure transition coherency" msgstr "" +"Atributo automatizado usado para asegurar la coherencia en la transición" msgid "automatic" msgstr "Automático" #, python-format msgid "back to pagination (%s results)" -msgstr "" +msgstr "regresar a paginación (%s resultados)" msgid "bad value" msgstr "Valor erróneo" msgid "badly formatted url" -msgstr "" +msgstr "url mal formateado" msgid "base url" msgstr "Url de base" @@ -1398,7 +1401,7 @@ msgstr "Imposible de interpretar los tipos de entidades:" msgid "can only have one url" -msgstr "" +msgstr "solo puede tener un URL" msgid "can't be changed" msgstr "No puede ser modificado" @@ -1407,11 +1410,7 @@ msgstr "No puede ser eliminado" msgid "can't change this attribute" -msgstr "" - -#, python-format -msgid "can't connect to source %s, some data may be missing" -msgstr "no se puede conectar a la fuente %s, algunos datos pueden faltar" +msgstr "no puede modificar este atributo" #, python-format msgid "can't display data, unexpected error: %s" @@ -1420,12 +1419,6 @@ msgid "can't have multiple exits on the same state" msgstr "no puede tener varias salidas en el mismo estado" -msgid "can't mix dontcross and maycross options" -msgstr "no puede mezclar las opciones dontcross y maycross" - -msgid "can't mix dontcross and write options" -msgstr "no puede mezclar las opciones dontcross y write" - #, python-format msgid "can't parse %(value)r (expected %(format)s)" msgstr "no puede analizar %(value)r (formato requerido : %(format)s)" @@ -1435,16 +1428,22 @@ "can't restore entity %(eid)s of type %(eschema)s, target of %(rtype)s (eid " "%(value)s) does not exist any longer" msgstr "" +"no se pudo restaurar la entidad %(eid)s del tipo %(eschema)s, objetivo de " +"%(rtype)s (eid %(value)s) pues ésta ya no existe" #, python-format msgid "" "can't restore relation %(rtype)s of entity %(eid)s, this relation does not " "exist in the schema anymore." msgstr "" +"no se pudo restaurar la relación %(rtype)s de la entidad %(eid)s, esta " +"relación ya no existe en el esquema. 
" #, python-format msgid "can't restore state of entity %s, it has been deleted inbetween" msgstr "" +"no se puede restaurar el estado de la entidad %s, se ha borrado desde " +"entonces" #, python-format msgid "" @@ -1503,7 +1502,7 @@ msgstr "seleccione para editar este campo" msgid "close all" -msgstr "" +msgstr "cerrar todos" msgid "comment" msgstr "Comentario" @@ -1734,7 +1733,7 @@ msgid "" "creating RQLExpression (CWAttribute %(linkto)s add_permission RQLExpression)" -msgstr "" +msgstr "Creación de una expresión RQL para permitir agregar %(linkto)s" msgid "" "creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)" @@ -1849,7 +1848,7 @@ msgstr "Exportar en CSV" msgid "csv export (entities)" -msgstr "" +msgstr "Exportar a CSV (entidades)" msgid "ctxcomponents" msgstr "Componentes contextuales" @@ -1968,10 +1967,10 @@ msgstr "Workflow de" msgid "cw.groups-management" -msgstr "" +msgstr "grupos" msgid "cw.users-management" -msgstr "" +msgstr "usuarios" msgid "cw_for_source" msgstr "fuente" @@ -2002,18 +2001,18 @@ msgstr "tiene la configuración del host" msgid "cw_import_of" -msgstr "" +msgstr "fuente" msgctxt "CWDataImport" msgid "cw_import_of" -msgstr "" +msgstr "fuente" msgid "cw_import_of_object" -msgstr "" +msgstr "importación" msgctxt "CWSource" msgid "cw_import_of_object" -msgstr "" +msgstr "importación" msgid "cw_schema" msgstr "esquema" @@ -2071,7 +2070,7 @@ msgstr "Permisos" msgid "cwsource-imports" -msgstr "" +msgstr "importación" msgid "cwsource-main" msgstr "descripción" @@ -2120,7 +2119,7 @@ msgstr "Valor por defecto" msgid "default value as gziped pickled python object" -msgstr "" +msgstr "valor por defecto, en la forma de objeto python, al usar pickle y gzip" msgid "default workflow for an entity type" msgstr "Workflow por defecto para un tipo de entidad" @@ -2409,11 +2408,11 @@ msgstr "Html incrustado" msgid "end_timestamp" -msgstr "" +msgstr "horario final" msgctxt "CWDataImport" msgid "end_timestamp" -msgstr "" +msgstr "horario final" msgid "entities deleted" msgstr "Entidades eliminadas" @@ -2457,7 +2456,7 @@ msgstr "Actualización de la Entidad" msgid "entityview" -msgstr "" +msgstr "vistas de entidades" msgid "error" msgstr "error" @@ -2466,12 +2465,6 @@ msgstr "" "Se ha producido un error durante la interpretación del texto en formato ReST" -#, python-format -msgid "error while querying source %s, some data may be missing" -msgstr "" -"Un error ha ocurrido al interrogar %s, es posible que los \n" -"datos visibles se encuentren incompletos" - msgid "exit state must be a subworkflow state" msgstr "El estado de salida debe de ser un estado del Sub-Workflow" @@ -2500,14 +2493,14 @@ msgstr "Tipo" msgid "extra_props" -msgstr "" +msgstr "propiedades adicionales" msgctxt "CWAttribute" msgid "extra_props" -msgstr "" +msgstr "propiedades adicionales" msgid "facet-loading-msg" -msgstr "" +msgstr "procesando, espere por favor" msgid "facet.filters" msgstr "Filtros" @@ -2534,10 +2527,10 @@ msgstr "Faceta para las entidades \"finales\"" msgid "facets_datafeed.dataimport.status" -msgstr "" +msgstr "estado de la importación" msgid "facets_datafeed.dataimport.status_description" -msgstr "" +msgstr "Estado de la importación de datos" msgid "facets_etype-facet" msgstr "Faceta \"es de tipo\"" @@ -2564,7 +2557,7 @@ msgstr "Faceta en el estado" msgid "failed" -msgstr "" +msgstr "fallido" #, python-format msgid "failed to uniquify path (%s, %s)" @@ -2601,7 +2594,7 @@ msgstr "Amigo de un Amigo, FOAF" msgid "focus on this selection" -msgstr "" +msgstr "muestre esta 
selección" msgid "follow" msgstr "Seguir la liga" @@ -2732,16 +2725,16 @@ msgstr "Grupos" msgid "groups allowed to add entities/relations of this type" -msgstr "" +msgstr "grupos autorizados a agregar entidades/relaciones de este tipo" msgid "groups allowed to delete entities/relations of this type" -msgstr "" +msgstr "grupos autorizados a eliminar entidades/relaciones de este tipo" msgid "groups allowed to read entities/relations of this type" -msgstr "" +msgstr "grupos autorizados a leer entidades/relaciones de este tipo" msgid "groups allowed to update entities/relations of this type" -msgstr "" +msgstr "grupos autorizados a actualizar entidades/relaciones de este tipo" msgid "groups grant permissions to the user" msgstr "Los grupos otorgan los permisos al usuario" @@ -2756,7 +2749,7 @@ msgstr "Contiene el texto" msgid "header-center" -msgstr "" +msgstr "header - centro" msgid "header-left" msgstr "encabezado (izquierdo)" @@ -2857,7 +2850,7 @@ msgstr "Imagen" msgid "in progress" -msgstr "" +msgstr "en progreso" msgid "in_group" msgstr "En el grupo" @@ -2880,11 +2873,11 @@ msgstr "Estado de" msgid "in_synchronization" -msgstr "" +msgstr "sincronizado" msgctxt "CWSource" msgid "in_synchronization" -msgstr "" +msgstr "sincronizado" msgid "incontext" msgstr "En el contexto" @@ -2894,7 +2887,7 @@ #, python-format msgid "incorrect value (%(KEY-value)r) for type \"%(KEY-type)s\"" -msgstr "" +msgstr "el valor (%(KEY-value)r) es incorrecto para el tipo \"%(KEY-type)s\"" msgid "index this attribute's value in the plain text index" msgstr "Indexar el valor de este atributo en el índice de texto simple" @@ -2945,12 +2938,6 @@ msgid "inlined" msgstr "Inlined" -#, python-format -msgid "inlined relation %(rtype)s of %(etype)s should be supported" -msgstr "" -"la relación %(rtype)s del tipo de entidad %(etype)s debe ser aceptada " -"('inlined')" - msgid "instance home" msgstr "Repertorio de la Instancia" @@ -2970,7 +2957,7 @@ #, python-format msgid "invalid value %(KEY-value)s, it must be one of %(KEY-choices)s" -msgstr "" +msgstr "Valor %(KEY-value)s es incorrecto, seleccione entre %(KEY-choices)s" msgid "is" msgstr "es" @@ -3014,10 +3001,10 @@ msgstr "Enero" msgid "json-entities-export-view" -msgstr "" +msgstr "Exportación JSON (de entidades)" msgid "json-export-view" -msgstr "" +msgstr "Exportación JSON" msgid "july" msgstr "Julio" @@ -3048,7 +3035,7 @@ msgstr "Ultima conexión" msgid "latest import" -msgstr "" +msgstr "importaciones recientes" msgid "latest modification time of an entity" msgstr "Fecha de la última modificación de una entidad " @@ -3070,7 +3057,7 @@ msgstr "izquierda" msgid "line" -msgstr "" +msgstr "línea" msgid "" "link a property to the user which want this property customization. 
Unless " @@ -3105,11 +3092,11 @@ msgstr "Lista" msgid "log" -msgstr "" +msgstr "log" msgctxt "CWDataImport" msgid "log" -msgstr "" +msgstr "log" msgid "log in" msgstr "Acceder" @@ -3194,7 +3181,7 @@ msgstr "depuración (debugging) de fuga de memoria" msgid "message" -msgstr "" +msgstr "mensaje" #, python-format msgid "missing parameters for entity %s" @@ -3253,11 +3240,11 @@ msgctxt "CWSource" msgid "name" -msgstr "" +msgstr "nombre" msgctxt "CWUniqueTogetherConstraint" msgid "name" -msgstr "" +msgstr "nombre" msgctxt "State" msgid "name" @@ -3319,7 +3306,7 @@ msgstr "Nuevo" msgid "next page" -msgstr "" +msgstr "página siguiente" msgid "next_results" msgstr "Siguientes resultados" @@ -3328,20 +3315,16 @@ msgstr "No" msgid "no content next link" -msgstr "" +msgstr "no hay liga siguiente" msgid "no content prev link" -msgstr "" +msgstr "no existe liga previa" msgid "no edited fields specified" -msgstr "" - -#, python-format -msgid "no edited fields specified for entity %s" -msgstr "Ningún campo editable especificado para la entidad %s" +msgstr "ningún campo por editar especificado" msgid "no log to display" -msgstr "" +msgstr "no arrojó elementos para mostrar" msgid "no related entity" msgstr "No posee entidad asociada" @@ -3375,7 +3358,7 @@ msgstr "Noviembre" msgid "num. users" -msgstr "" +msgstr "Número de Usuarios" msgid "object" msgstr "Objeto" @@ -3475,7 +3458,7 @@ msgstr "Ruta" msgid "permalink to this message" -msgstr "" +msgstr "liga permanente a este mensaje" msgid "permission" msgstr "Permiso" @@ -3520,7 +3503,7 @@ msgstr "Preferencias" msgid "previous page" -msgstr "" +msgstr "página anterior" msgid "previous_results" msgstr "Resultados Anteriores" @@ -3552,7 +3535,7 @@ msgstr "Permisos" msgid "rdf export" -msgstr "" +msgstr "Exportación RDF" msgid "read" msgstr "Lectura" @@ -3727,22 +3710,22 @@ msgstr "Derecha" msgid "rql expression allowing to add entities/relations of this type" -msgstr "" +msgstr "Expresión RQL que permite AGREGAR entidades/relaciones de este tipo" msgid "rql expression allowing to delete entities/relations of this type" -msgstr "" +msgstr "Expresión RQL que permite ELIMINAR entidades/relaciones de este tipo" msgid "rql expression allowing to read entities/relations of this type" -msgstr "" +msgstr "Expresión RQL que permite LEER entidades/relaciones de este tipo" msgid "rql expression allowing to update entities/relations of this type" -msgstr "" +msgstr "Expresión RQL que permite ACTUALIZAR entidades/relaciones de este tipo" msgid "rql expressions" msgstr "Expresiones RQL" msgid "rss export" -msgstr "" +msgstr "Exportación RSS" msgid "same_as" msgstr "Idéntico a" @@ -3829,7 +3812,7 @@ msgstr "Información del servidor" msgid "severity" -msgstr "" +msgstr "severidad" msgid "" "should html fields being edited using fckeditor (a HTML WYSIWYG editor). " @@ -3868,7 +3851,7 @@ "Las transacciones más recientes modificaron esta entidad, anúlelas primero" msgid "some relations violate a unicity constraint" -msgstr "" +msgstr "algunas relaciones no respetan la restricción de unicidad" msgid "sorry, the server is unable to handle this query" msgstr "Lo sentimos, el servidor no puede manejar esta consulta" @@ -3908,19 +3891,21 @@ "start timestamp of the currently in synchronization, or NULL when no " "synchronization in progress." 
msgstr "" +"horario de inicio de la sincronización en curso, o NULL cuando no existe " +"sincronización en curso" msgid "start_timestamp" -msgstr "" +msgstr "horario inicio" msgctxt "CWDataImport" msgid "start_timestamp" -msgstr "" +msgstr "horario inicio" msgid "startup views" msgstr "Vistas de inicio" msgid "startupview" -msgstr "" +msgstr "Vistas de Inicio" msgid "state" msgstr "Estado" @@ -3962,11 +3947,11 @@ msgstr "Tiene por Estado" msgid "status" -msgstr "" +msgstr "estado" msgctxt "CWDataImport" msgid "status" -msgstr "" +msgstr "estado" msgid "status change" msgstr "Cambio de Estatus" @@ -4039,7 +4024,7 @@ msgstr "Estado de Salida de" msgid "success" -msgstr "" +msgstr "éxito" msgid "sunday" msgstr "Domingo" @@ -4097,14 +4082,14 @@ msgstr "El valor \"%s\" ya esta en uso, favor de utilizar otro" msgid "there is no next page" -msgstr "" +msgstr "no existe página siguiente" msgid "there is no previous page" -msgstr "" +msgstr "no existe página anterior" #, python-format msgid "there is no transaction #%s" -msgstr "" +msgstr "no existe la transacción #%s" msgid "this action is not reversible!" msgstr "Esta acción es irreversible!." @@ -4200,7 +4185,7 @@ msgstr "n° de transición" msgid "transaction undone" -msgstr "" +msgstr "transacción anulada" #, python-format msgid "transition %(tr)s isn't allowed from %(st)s" @@ -4311,7 +4296,7 @@ msgstr "Valor no permitido" msgid "undefined user" -msgstr "" +msgstr "usuario indefinido" msgid "undo" msgstr "Anular" @@ -4323,10 +4308,6 @@ msgstr "Entidad externa desconocida" #, python-format -msgid "unknown option(s): %s" -msgstr "opcion(es) desconocida(s): %s" - -#, python-format msgid "unknown options %s" msgstr "opciones desconocidas: %s" @@ -4338,7 +4319,7 @@ msgstr "Vocabulario desconocido: " msgid "unsupported protocol" -msgstr "" +msgstr "protocolo no soportado" msgid "upassword" msgstr "Contraseña" @@ -4450,7 +4431,7 @@ msgstr "Preferencias" msgid "user's email account" -msgstr "" +msgstr "email del usuario" msgid "users" msgstr "Usuarios" @@ -4480,26 +4461,26 @@ #, python-format msgid "value %(KEY-value)s must be %(KEY-op)s %(KEY-boundary)s" -msgstr "" +msgstr "El valor %(KEY-value)s debe ser %(KEY-op)s %(KEY-boundary)s" #, python-format msgid "value %(KEY-value)s must be <= %(KEY-boundary)s" -msgstr "" +msgstr "el valor %(KEY-value)s debe ser <= %(KEY-boundary)s" #, python-format msgid "value %(KEY-value)s must be >= %(KEY-boundary)s" -msgstr "" +msgstr "el valor %(KEY-value)s debe ser >= %(KEY-boundary)s" msgid "value associated to this key is not editable manually" msgstr "El valor asociado a este elemento no es editable manualmente" #, python-format msgid "value should have maximum size of %(KEY-max)s but found %(KEY-size)s" -msgstr "" +msgstr "el valor máximo es %(KEY-max)s y encontramos %(KEY-size)s" #, python-format msgid "value should have minimum size of %(KEY-min)s but found %(KEY-size)s" -msgstr "" +msgstr "el valor mínimo debe ser %(KEY-min)s y encontramos %(KEY-size)s" msgid "vcard" msgstr "vcard" @@ -4541,14 +4522,14 @@ msgstr "Aún no podemos manejar este tipo de consulta Sparql" msgid "web sessions without CNX" -msgstr "" +msgstr "sesiones web sin conexión asociada" msgid "wednesday" msgstr "Miércoles" #, python-format msgid "welcome %s!" -msgstr "¡ Bienvenido %s !" +msgstr "Bienvenido %s." 
msgid "wf_info_for" msgstr "Histórico de" @@ -4575,10 +4556,10 @@ msgstr "Workflow" msgid "workflow already has a state of that name" -msgstr "" +msgstr "el workflow posee ya un estado con ese nombre" msgid "workflow already has a transition of that name" -msgstr "" +msgstr "El Workflow posee ya una transición con ese nombre" #, python-format msgid "workflow changed to \"%s\"" @@ -4618,13 +4599,13 @@ msgstr "Parámetro erróneo de consulta línea %s" msgid "xbel export" -msgstr "" +msgstr "Exportación XBEL" msgid "xml export" msgstr "Exportar XML" msgid "xml export (entities)" -msgstr "" +msgstr "Exportación XML (entidades)" msgid "yes" msgstr "Sí" @@ -4632,15 +4613,47 @@ msgid "you have been logged out" msgstr "Ha terminado la sesión" -#, python-format -msgid "you may want to specify something for %s" -msgstr "usted desea quizás especificar algo para la relación %s" - msgid "you should probably delete that property" msgstr "probablamente debería suprimir esta propriedad" -#, python-format -msgid "you should un-inline relation %s which is supported and may be crossed " -msgstr "" -"usted debe quitar la puesta en línea de la relación %s que es aceptada y " -"puede ser cruzada" +#~ msgid "%s relation should not be in mapped" +#~ msgstr "la relación %s no debería estar mapeada" + +#~ msgid "attribute/relation can't be mapped, only entity and relation types" +#~ msgstr "" +#~ "los atributos y las relaciones no pueden ser mapeados, solamente los " +#~ "tipos de entidad y de relación" + +#~ msgid "can't connect to source %s, some data may be missing" +#~ msgstr "no se puede conectar a la fuente %s, algunos datos pueden faltar" + +#~ msgid "can't mix dontcross and maycross options" +#~ msgstr "no puede mezclar las opciones dontcross y maycross" + +#~ msgid "can't mix dontcross and write options" +#~ msgstr "no puede mezclar las opciones dontcross y write" + +#~ msgid "error while querying source %s, some data may be missing" +#~ msgstr "" +#~ "Un error ha ocurrido al interrogar %s, es posible que los \n" +#~ "datos visibles se encuentren incompletos" + +#~ msgid "inlined relation %(rtype)s of %(etype)s should be supported" +#~ msgstr "" +#~ "la relación %(rtype)s del tipo de entidad %(etype)s debe ser aceptada " +#~ "('inlined')" + +#~ msgid "no edited fields specified for entity %s" +#~ msgstr "Ningún campo editable especificado para la entidad %s" + +#~ msgid "unknown option(s): %s" +#~ msgstr "opcion(es) desconocida(s): %s" + +#~ msgid "you may want to specify something for %s" +#~ msgstr "usted desea quizás especificar algo para la relación %s" + +#~ msgid "" +#~ "you should un-inline relation %s which is supported and may be crossed " +#~ msgstr "" +#~ "usted debe quitar la puesta en línea de la relación %s que es aceptada y " +#~ "puede ser cruzada" diff -r 84738d495ffd -r 793377697c81 i18n/fr.po --- a/i18n/fr.po Wed Sep 24 17:35:59 2014 +0200 +++ b/i18n/fr.po Wed Sep 24 18:04:30 2014 +0200 @@ -119,10 +119,6 @@ msgstr "%s appartient à une contrainte d'unicité transgressée" #, python-format -msgid "%s relation should not be in mapped" -msgstr "la relation %s ne devrait pas ếtre mappé" - -#, python-format msgid "%s software version of the database" msgstr "version logicielle de la base pour %s" @@ -1288,11 +1284,6 @@ msgid "attribute" msgstr "attribut" -msgid "attribute/relation can't be mapped, only entity and relation types" -msgstr "" -"les attributs et relations ne peuvent être mappés, uniquement les types " -"d'entité et de relation" - msgid "august" msgstr "août" @@ -1417,22 +1408,12 @@ 
msgstr "cet attribut ne peut pas être modifié" #, python-format -msgid "can't connect to source %s, some data may be missing" -msgstr "ne peut se connecter à la source %s, des données peuvent manquer" - -#, python-format msgid "can't display data, unexpected error: %s" msgstr "impossible d'afficher les données à cause de l'erreur suivante: %s" msgid "can't have multiple exits on the same state" msgstr "ne peut avoir plusieurs sorties sur le même état" -msgid "can't mix dontcross and maycross options" -msgstr "ne peut mélanger dontcross et maycross options" - -msgid "can't mix dontcross and write options" -msgstr "ne peut mélanger dontcross et write options" - #, python-format msgid "can't parse %(value)r (expected %(format)s)" msgstr "ne peut analyser %(value)r (format attendu : %(format)s)" @@ -2479,12 +2460,6 @@ msgstr "" "une erreur s'est produite lors de l'interprétation du texte au format ReST" -#, python-format -msgid "error while querying source %s, some data may be missing" -msgstr "" -"une erreur est survenue en interrogeant %s, il est possible que les\n" -"données affichées soient incomplètes" - msgid "exit state must be a subworkflow state" msgstr "l'état de sortie doit être un état du sous-workflow" @@ -2956,12 +2931,6 @@ msgid "inlined" msgstr "mise en ligne" -#, python-format -msgid "inlined relation %(rtype)s of %(etype)s should be supported" -msgstr "" -"la relation %(rtype)s du type d'entité %(etype)s doit être supportée " -"('inlined')" - msgid "instance home" msgstr "répertoire de l'instance" @@ -3349,10 +3318,6 @@ msgid "no edited fields specified" msgstr "aucun champ à éditer spécifié" -#, python-format -msgid "no edited fields specified for entity %s" -msgstr "aucun champ à éditer spécifié pour l'entité %s" - msgid "no log to display" msgstr "rien à afficher" @@ -4340,10 +4305,6 @@ msgstr "entité (externe) introuvable" #, python-format -msgid "unknown option(s): %s" -msgstr "option(s) inconnue(s) : %s" - -#, python-format msgid "unknown options %s" msgstr "options inconnues : %s" @@ -4654,15 +4615,47 @@ msgid "you have been logged out" msgstr "vous avez été déconnecté" -#, python-format -msgid "you may want to specify something for %s" -msgstr "vous désirez peut-être spécifié quelque chose pour la relation %s" - msgid "you should probably delete that property" msgstr "vous devriez probablement supprimer cette propriété" -#, python-format -msgid "you should un-inline relation %s which is supported and may be crossed " -msgstr "" -"vous devriez enlevé la mise en ligne de la relation %s qui est supportée et " -"peut-être croisée" +#~ msgid "%s relation should not be in mapped" +#~ msgstr "la relation %s ne devrait pas ếtre mappé" + +#~ msgid "attribute/relation can't be mapped, only entity and relation types" +#~ msgstr "" +#~ "les attributs et relations ne peuvent être mappés, uniquement les types " +#~ "d'entité et de relation" + +#~ msgid "can't connect to source %s, some data may be missing" +#~ msgstr "ne peut se connecter à la source %s, des données peuvent manquer" + +#~ msgid "can't mix dontcross and maycross options" +#~ msgstr "ne peut mélanger dontcross et maycross options" + +#~ msgid "can't mix dontcross and write options" +#~ msgstr "ne peut mélanger dontcross et write options" + +#~ msgid "error while querying source %s, some data may be missing" +#~ msgstr "" +#~ "une erreur est survenue en interrogeant %s, il est possible que les\n" +#~ "données affichées soient incomplètes" + +#~ msgid "inlined relation %(rtype)s of %(etype)s should be supported" +#~ 
msgstr "" +#~ "la relation %(rtype)s du type d'entité %(etype)s doit être supportée " +#~ "('inlined')" + +#~ msgid "no edited fields specified for entity %s" +#~ msgstr "aucun champ à éditer spécifié pour l'entité %s" + +#~ msgid "unknown option(s): %s" +#~ msgstr "option(s) inconnue(s) : %s" + +#~ msgid "you may want to specify something for %s" +#~ msgstr "vous désirez peut-être spécifié quelque chose pour la relation %s" + +#~ msgid "" +#~ "you should un-inline relation %s which is supported and may be crossed " +#~ msgstr "" +#~ "vous devriez enlevé la mise en ligne de la relation %s qui est supportée " +#~ "et peut-être croisée" diff -r 84738d495ffd -r 793377697c81 migration.py --- a/migration.py Wed Sep 24 17:35:59 2014 +0200 +++ b/migration.py Wed Sep 24 18:04:30 2014 +0200 @@ -413,7 +413,9 @@ toremove = (cube,) origcubes = self.config._cubes basecubes = [c for c in origcubes if not c in toremove] - self.config._cubes = tuple(self.config.expand_cubes(basecubes)) + # don't fake-add any new ones, or we won't be able to really-add them later + self.config._cubes = tuple(cube for cube in self.config.expand_cubes(basecubes) + if cube in origcubes) removed = [p for p in origcubes if not p in self.config._cubes] if not cube in removed and cube in origcubes: raise ConfigurationError("can't remove cube %s, " diff -r 84738d495ffd -r 793377697c81 misc/migration/3.10.0_Any.py --- a/misc/migration/3.10.0_Any.py Wed Sep 24 17:35:59 2014 +0200 +++ b/misc/migration/3.10.0_Any.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,6 +1,6 @@ from cubicweb.server.session import hooks_control -for uri, cfg in config.sources().items(): +for uri, cfg in config.read_sources_file().items(): if uri in ('system', 'admin'): continue repo.sources_by_uri[uri] = repo.get_source(cfg['adapter'], uri, cfg.copy()) @@ -18,7 +18,7 @@ 'WHERE s.cw_name=e.type') commit() -for uri, cfg in config.sources().items(): +for uri, cfg in config.read_sources_file().items(): if uri in ('system', 'admin'): continue repo.sources_by_uri.pop(uri) diff -r 84738d495ffd -r 793377697c81 misc/migration/3.11.0_Any.py --- a/misc/migration/3.11.0_Any.py Wed Sep 24 17:35:59 2014 +0200 +++ b/misc/migration/3.11.0_Any.py Wed Sep 24 18:04:30 2014 +0200 @@ -9,77 +9,3 @@ add_attribute('CWSource', 'url') add_attribute('CWSource', 'parser') add_attribute('CWSource', 'latest_retrieval') - -try: - from cubicweb.server.sources.pyrorql import PyroRQLSource -except ImportError: - pass -else: - - from os.path import join - # function to read old python mapping file - def load_mapping_file(source): - mappingfile = source.config['mapping-file'] - mappingfile = join(source.repo.config.apphome, mappingfile) - mapping = {} - execfile(mappingfile, mapping) - for junk in ('__builtins__', '__doc__'): - mapping.pop(junk, None) - mapping.setdefault('support_relations', {}) - mapping.setdefault('dont_cross_relations', set()) - mapping.setdefault('cross_relations', set()) - # do some basic checks of the mapping content - assert 'support_entities' in mapping, \ - 'mapping file should at least define support_entities' - assert isinstance(mapping['support_entities'], dict) - assert isinstance(mapping['support_relations'], dict) - assert isinstance(mapping['dont_cross_relations'], set) - assert isinstance(mapping['cross_relations'], set) - unknown = set(mapping) - set( ('support_entities', 'support_relations', - 'dont_cross_relations', 'cross_relations') ) - assert not unknown, 'unknown mapping attribute(s): %s' % unknown - # relations that are necessarily not crossed - for rtype in 
('is', 'is_instance_of', 'cw_source'): - assert rtype not in mapping['dont_cross_relations'], \ - '%s relation should not be in dont_cross_relations' % rtype - assert rtype not in mapping['support_relations'], \ - '%s relation should not be in support_relations' % rtype - return mapping - # for now, only pyrorql sources have a mapping - for source in repo.sources_by_uri.itervalues(): - if not isinstance(source, PyroRQLSource): - continue - sourceentity = session.entity_from_eid(source.eid) - mapping = load_mapping_file(source) - # write mapping as entities - print 'migrating map for', source - for etype, write in mapping['support_entities'].items(): - create_entity('CWSourceSchemaConfig', - cw_for_source=sourceentity, - cw_schema=session.entity_from_eid(schema[etype].eid), - options=write and u'write' or None, - ask_confirm=False) - for rtype, write in mapping['support_relations'].items(): - options = [] - if write: - options.append(u'write') - if rtype in mapping['cross_relations']: - options.append(u'maycross') - create_entity('CWSourceSchemaConfig', - cw_for_source=sourceentity, - cw_schema=session.entity_from_eid(schema[rtype].eid), - options=u':'.join(options) or None, - ask_confirm=False) - for rtype in mapping['dont_cross_relations']: - create_entity('CWSourceSchemaConfig', - cw_for_source=source, - cw_schema=session.entity_from_eid(schema[rtype].eid), - options=u'dontcross', - ask_confirm=False) - # latest update time cwproperty is now a source attribute (latest_retrieval) - pkey = u'sources.%s.latest-update-time' % source.uri - rset = session.execute('Any V WHERE X is CWProperty, X value V, X pkey %(k)s', - {'k': pkey}) - timestamp = int(rset[0][0]) - sourceentity.cw_set(latest_retrieval=datetime.fromtimestamp(timestamp)) - session.execute('DELETE CWProperty X WHERE X pkey %(k)s', {'k': pkey}) diff -r 84738d495ffd -r 793377697c81 misc/migration/3.14.4_Any.py --- a/misc/migration/3.14.4_Any.py Wed Sep 24 17:35:59 2014 +0200 +++ b/misc/migration/3.14.4_Any.py Wed Sep 24 18:04:30 2014 +0200 @@ -4,8 +4,7 @@ rdefdef = schema['CWSource'].rdef('name') attrtype = y2sql.type_from_constraints(dbhelper, rdefdef.object, rdefdef.constraints).split()[0] -cursor = session.cnxset['system'] +cursor = session.cnxset.cu sql('UPDATE entities SET asource = source WHERE asource is NULL') dbhelper.change_col_type(cursor, 'entities', 'asource', attrtype, False) dbhelper.change_col_type(cursor, 'entities', 'source', attrtype, False) -dbhelper.change_col_type(cursor, 'deleted_entities', 'source', attrtype, False) diff -r 84738d495ffd -r 793377697c81 misc/migration/3.16.0_Any.py --- a/misc/migration/3.16.0_Any.py Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,13 +0,0 @@ -sync_schema_props_perms('EmailAddress') - -for source in rql('CWSource X WHERE X type "pyrorql"').entities(): - sconfig = source.dictconfig - nsid = sconfig.pop('pyro-ns-id', config.appid) - nshost = sconfig.pop('pyro-ns-host', '') - nsgroup = sconfig.pop('pyro-ns-group', ':cubicweb') - if nsgroup: - nsgroup += '.' 
- source.cw_set(url=u'pyro://%s/%s%s' % (nshost, nsgroup, nsid)) - source.update_config(skip_unknown=True, **sconfig) - -commit() diff -r 84738d495ffd -r 793377697c81 misc/migration/3.17.11_Any.py --- a/misc/migration/3.17.11_Any.py Wed Sep 24 17:35:59 2014 +0200 +++ b/misc/migration/3.17.11_Any.py Wed Sep 24 18:04:30 2014 +0200 @@ -2,6 +2,6 @@ ('transactions', 'tx_time'), ('tx_entity_actions', 'tx_uuid'), ('tx_relation_actions', 'tx_uuid')]: - session.cnxset.source('system').create_index(session, table, column) + repo.system_source.create_index(session, table, column) commit() diff -r 84738d495ffd -r 793377697c81 misc/migration/3.18.0_Any.py --- a/misc/migration/3.18.0_Any.py Wed Sep 24 17:35:59 2014 +0200 +++ b/misc/migration/3.18.0_Any.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -driver = config.sources()['system']['db-driver'] +driver = config.system_source_config['db-driver'] if not (driver == 'postgres' or driver.startswith('sqlserver')): import sys print >>sys.stderr, 'This migration is not supported for backends other than sqlserver or postgres (yet).' diff -r 84738d495ffd -r 793377697c81 misc/migration/3.19.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/migration/3.19.0_Any.py Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,3 @@ +sql('DROP TABLE "deleted_entities"') + +commit() diff -r 84738d495ffd -r 793377697c81 misc/migration/3.8.5_Any.py --- a/misc/migration/3.8.5_Any.py Wed Sep 24 17:35:59 2014 +0200 +++ b/misc/migration/3.8.5_Any.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,5 +1,5 @@ def migrate_varchar_to_nvarchar(): - dbdriver = config.sources()['system']['db-driver'] + dbdriver = config.system_source_config['db-driver'] if dbdriver != "sqlserver2005": return diff -r 84738d495ffd -r 793377697c81 misc/migration/bootstrapmigration_repository.py --- a/misc/migration/bootstrapmigration_repository.py Wed Sep 24 17:35:59 2014 +0200 +++ b/misc/migration/bootstrapmigration_repository.py Wed Sep 24 18:04:30 2014 +0200 @@ -35,6 +35,28 @@ ss.execschemarql(rql, rdef, ss.rdef2rql(rdef, CSTRMAP, groupmap=None)) commit(ask_confirm=False) +def replace_eid_sequence_with_eid_numrange(session): + dbh = session.repo.system_source.dbhelper + cursor = session.cnxset.cu + try: + cursor.execute(dbh.sql_sequence_current_state('entities_id_seq')) + lasteid = cursor.fetchone()[0] + except: # programming error, already migrated + return + + cursor.execute(dbh.sql_drop_sequence('entities_id_seq')) + cursor.execute(dbh.sql_create_numrange('entities_id_seq')) + cursor.execute(dbh.sql_restart_numrange('entities_id_seq', initial_value=lasteid)) + session.commit() + +if applcubicwebversion < (3, 19, 0) and cubicwebversion >= (3, 19, 0): + sql('ALTER TABLE "entities" DROP COLUMN "mtime"') + sql('ALTER TABLE "entities" DROP COLUMN "source"') + + commit() + + replace_eid_sequence_with_eid_numrange(session) + if applcubicwebversion < (3, 17, 0) and cubicwebversion >= (3, 17, 0): try: add_cube('sioc', update_database=False) @@ -223,11 +245,11 @@ if applcubicwebversion < (3, 2, 2) and cubicwebversion >= (3, 2, 1): from base64 import b64encode - for table in ('entities', 'deleted_entities'): - for eid, extid in sql('SELECT eid, extid FROM %s WHERE extid is NOT NULL' - % table, ask_confirm=False): - sql('UPDATE %s SET extid=%%(extid)s WHERE eid=%%(eid)s' % table, - {'extid': b64encode(extid), 'eid': eid}, ask_confirm=False) + for eid, extid in sql('SELECT eid, extid FROM entities ' + 'WHERE extid is NOT NULL', + ask_confirm=False): + sql('UPDATE entities SET extid=%(extid)s WHERE eid=%(eid)s', + 
{'extid': b64encode(extid), 'eid': eid}, ask_confirm=False) commit() if applcubicwebversion < (3, 2, 0) and cubicwebversion >= (3, 2, 0): diff -r 84738d495ffd -r 793377697c81 misc/scripts/cwuser_ldap2system.py --- a/misc/scripts/cwuser_ldap2system.py Wed Sep 24 17:35:59 2014 +0200 +++ b/misc/scripts/cwuser_ldap2system.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,7 +1,7 @@ import base64 from cubicweb.server.utils import crypt_password -dbdriver = config.sources()['system']['db-driver'] +dbdriver = config.system_source_config['db-driver'] from logilab.database import get_db_helper dbhelper = get_db_helper(driver) diff -r 84738d495ffd -r 793377697c81 misc/scripts/drop_external_entities.py --- a/misc/scripts/drop_external_entities.py Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,23 +0,0 @@ -from cubicweb import UnknownEid -source, = __args__ - -sql("DELETE FROM entities WHERE type='Int'") - -ecnx = session.cnxset.connection(source) -for e in rql('Any X WHERE X cw_source S, S name %(name)s', {'name': source}).entities(): - meta = e.cw_metainformation() - assert meta['source']['uri'] == source - try: - suri = ecnx.describe(meta['extid'])[1] - except UnknownEid: - print 'cant describe', e.cw_etype, e.eid, meta - continue - if suri != 'system': - try: - print 'deleting', e.cw_etype, e.eid, suri, e.dc_title().encode('utf8') - repo.delete_info(session, e, suri, scleanup=e.eid) - except UnknownEid: - print ' cant delete', e.cw_etype, e.eid, meta - - -commit() diff -r 84738d495ffd -r 793377697c81 misc/scripts/ldap_change_base_dn.py --- a/misc/scripts/ldap_change_base_dn.py Wed Sep 24 17:35:59 2014 +0200 +++ b/misc/scripts/ldap_change_base_dn.py Wed Sep 24 18:04:30 2014 +0200 @@ -6,7 +6,7 @@ print print 'you should not have updated your sources file yet' -olddn = repo.config.sources()[uri]['user-base-dn'] +olddn = repo.sources_by_uri[uri].config['user-base-dn'] assert olddn != newdn diff -r 84738d495ffd -r 793377697c81 misc/scripts/repair_file_1-9_migration.py --- a/misc/scripts/repair_file_1-9_migration.py Wed Sep 24 17:35:59 2014 +0200 +++ b/misc/scripts/repair_file_1-9_migration.py Wed Sep 24 18:04:30 2014 +0200 @@ -15,11 +15,11 @@ from cubicweb import cwconfig, dbapi from cubicweb.server.session import hooks_control -sourcescfg = repo.config.sources() +defaultadmin = repo.config.default_admin_config backupcfg = cwconfig.instance_configuration(backupinstance) backupcfg.repairing = True -backuprepo, backupcnx = dbapi.in_memory_repo_cnx(backupcfg, sourcescfg['admin']['login'], - password=sourcescfg['admin']['password'], +backuprepo, backupcnx = dbapi.in_memory_repo_cnx(backupcfg, defaultadmin['login'], + password=defaultadmin['password'], host='localhost') backupcu = backupcnx.cursor() diff -r 84738d495ffd -r 793377697c81 multipart.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/multipart.py Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,413 @@ +# -*- coding: utf-8 -*- +''' +Parser for multipart/form-data +============================== + +This module provides a parser for the multipart/form-data format. It can read +from a file, a socket or a WSGI environment. The parser can be used to replace +cgi.FieldStorage (without the bugs) and works with Python 2.5+ and 3.x (2to3). + +Licence (MIT) +------------- + + Copyright (c) 2010, Marcel Hellkamp. 
+ Inspired by the Werkzeug library: http://werkzeug.pocoo.org/
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+
+'''
+
+__author__ = 'Marcel Hellkamp'
+__version__ = '0.1'
+__license__ = 'MIT'
+
+from tempfile import TemporaryFile
+from wsgiref.headers import Headers
+import re, sys
+try:
+    from urlparse import parse_qs
+except ImportError: # pragma: no cover (fallback for Python 2.5)
+    from cgi import parse_qs
+try:
+    from io import BytesIO
+except ImportError: # pragma: no cover (fallback for Python 2.5)
+    from StringIO import StringIO as BytesIO
+
+##############################################################################
+################################ Helper & Misc ################################
+##############################################################################
+# Some of these were copied from bottle: http://bottle.paws.de/
+
+try:
+    from collections import MutableMapping as DictMixin
+except ImportError: # pragma: no cover (fallback for Python 2.5)
+    from UserDict import DictMixin
+
+class MultiDict(DictMixin):
+    """ A dict that remembers old values for each key """
+    def __init__(self, *a, **k):
+        self.dict = dict()
+        for k, v in dict(*a, **k).iteritems():
+            self[k] = v
+
+    def __len__(self): return len(self.dict)
+    def __iter__(self): return iter(self.dict)
+    def __contains__(self, key): return key in self.dict
+    def __delitem__(self, key): del self.dict[key]
+    def keys(self): return self.dict.keys()
+    def __getitem__(self, key): return self.get(key, KeyError, -1)
+    def __setitem__(self, key, value): self.append(key, value)
+
+    def append(self, key, value): self.dict.setdefault(key, []).append(value)
+    def replace(self, key, value): self.dict[key] = [value]
+    def getall(self, key): return self.dict.get(key) or []
+
+    def get(self, key, default=None, index=-1):
+        if key not in self.dict and default != KeyError:
+            return [default][index]
+        return self.dict[key][index]
+
+    def iterallitems(self):
+        for key, values in self.dict.iteritems():
+            for value in values:
+                yield key, value
+
+def tob(data, enc='utf8'): # Convert strings to bytes (py2 and py3)
+    return data.encode(enc) if isinstance(data, unicode) else data
+
+def copy_file(stream, target, maxread=-1, buffer_size=2**16):
+    ''' Read from :stream and write to :target until :maxread or EOF. '''
+    size, read = 0, stream.read
+    while 1:
+        to_read = buffer_size if maxread < 0 else min(buffer_size, maxread-size)
+        part = read(to_read)
+        if not part: return size
+        target.write(part)
+        size += len(part)
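For orientation, the contract of the copy_file helper just above can be sketched in a few lines; the stream contents are invented, and the sketch assumes the new file is importable as a top-level `multipart` module:

    # Minimal sketch: copy_file drains src into dst and returns the byte count.
    from io import BytesIO
    from multipart import copy_file  # assumed import path for this new module

    src = BytesIO(b'x' * 100000)     # stand-in for an uploaded request body
    dst = BytesIO()
    assert copy_file(src, dst) == 100000
    assert dst.getvalue() == b'x' * 100000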
+
+##############################################################################
+################################ Header Parser ################################
+##############################################################################
+
+_special = re.escape('()<>@,;:\\"/[]?={} \t')
+_re_special = re.compile('[%s]' % _special)
+_qstr = '"(?:\\\\.|[^"])*"' # Quoted string
+_value = '(?:[^%s]+|%s)' % (_special, _qstr) # Safe or quoted string
+_option = '(?:;|^)\s*([^%s]+)\s*=\s*(%s)' % (_special, _value)
+_re_option = re.compile(_option) # key=value part of a Content-Type like header
+
+def header_quote(val):
+    if not _re_special.search(val):
+        return val
+    return '"' + val.replace('\\','\\\\').replace('"','\\"') + '"'
+
+def header_unquote(val, filename=False):
+    if val[0] == val[-1] == '"':
+        val = val[1:-1]
+        if filename and (val[1:3] == ':\\' or val[:2] == '\\\\'):
+            val = val.split('\\')[-1] # fix ie6 bug: full path --> filename
+        return val.replace('\\\\','\\').replace('\\"','"')
+    return val
+
+def parse_options_header(header, options=None):
+    if ';' not in header:
+        return header.lower().strip(), {}
+    ctype, tail = header.split(';', 1)
+    options = options or {}
+    for match in _re_option.finditer(tail):
+        key = match.group(1).lower()
+        value = header_unquote(match.group(2), key=='filename')
+        options[key] = value
+    return ctype, options
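As a small illustration of what the option parser above yields — the header value here is made up, and `multipart` is again the assumed import name:

    # parse_options_header splits 'type; key=value; ...' into (type, options).
    from multipart import parse_options_header

    ctype, opts = parse_options_header(
        'form-data; name="upfile"; filename="C:\\temp\\report.txt"')
    assert ctype == 'form-data'
    # Quoted values are unquoted, and the IE6 full-path quirk is stripped
    # from filenames: opts == {'name': 'upfile', 'filename': 'report.txt'}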
+
+##############################################################################
+################################## Multipart ##################################
+##############################################################################
+
+
+class MultipartError(ValueError): pass
+
+
+class MultipartParser(object):
+
+    def __init__(self, stream, boundary, content_length=-1,
+                 disk_limit=2**30, mem_limit=2**20, memfile_limit=2**18,
+                 buffer_size=2**16, charset='latin1'):
+        ''' Parse a multipart/form-data byte stream. This object is an iterator
+            over the parts of the message.
+
+            :param stream: A file-like stream. Must implement ``.read(size)``.
+            :param boundary: The multipart boundary as a byte string.
+            :param content_length: The maximum number of bytes to read.
+        '''
+        self.stream, self.boundary = stream, boundary
+        self.content_length = content_length
+        self.disk_limit = disk_limit
+        self.memfile_limit = memfile_limit
+        self.mem_limit = min(mem_limit, self.disk_limit)
+        self.buffer_size = min(buffer_size, self.mem_limit)
+        self.charset = charset
+        if self.buffer_size - 6 < len(boundary): # "--boundary--\r\n"
+            raise MultipartError('Boundary does not fit into buffer_size.')
+        self._done = []
+        self._part_iter = None
+
+    def __iter__(self):
+        ''' Iterate over the parts of the multipart message. '''
+        if not self._part_iter:
+            self._part_iter = self._iterparse()
+        for part in self._done:
+            yield part
+        for part in self._part_iter:
+            self._done.append(part)
+            yield part
+
+    def parts(self):
+        ''' Returns a list with all parts of the multipart message. '''
+        return list(iter(self))
+
+    def get(self, name, default=None):
+        ''' Return the first part with that name or a default value (None). '''
+        for part in self:
+            if name == part.name:
+                return part
+        return default
+
+    def get_all(self, name):
+        ''' Return a list of parts with that name. '''
+        return [p for p in self if p.name == name]
+
+    def _lineiter(self):
+        ''' Iterate over a binary file-like object line by line. Each line is
+            returned as a (line, line_ending) tuple. If the line does not fit
+            into self.buffer_size, line_ending is empty and the rest of the line
+            is returned with the next iteration.
+        '''
+        read = self.stream.read
+        maxread, maxbuf = self.content_length, self.buffer_size
+        _bcrnl = tob('\r\n')
+        _bcr = _bcrnl[:1]
+        _bnl = _bcrnl[1:]
+        _bempty = _bcrnl[:0] # b'\r\n'[:0] -> b''
+        buffer = _bempty # buffer for the last (partial) line
+        while 1:
+            data = read(maxbuf if maxread < 0 else min(maxbuf, maxread))
+            maxread -= len(data)
+            lines = (buffer+data).splitlines(True)
+            len_first_line = len(lines[0])
+            # be sure that the first line does not become too big
+            if len_first_line > self.buffer_size:
+                # at the same time don't split a '\r\n' accidentally
+                if (len_first_line == self.buffer_size+1 and
+                    lines[0].endswith(_bcrnl)):
+                    splitpos = self.buffer_size - 1
+                else:
+                    splitpos = self.buffer_size
+                lines[:1] = [lines[0][:splitpos],
+                             lines[0][splitpos:]]
+            if data:
+                buffer = lines[-1]
+                lines = lines[:-1]
+            for line in lines:
+                if line.endswith(_bcrnl): yield line[:-2], _bcrnl
+                elif line.endswith(_bnl): yield line[:-1], _bnl
+                elif line.endswith(_bcr): yield line[:-1], _bcr
+                else: yield line, _bempty
+            if not data:
+                break
+
+    def _iterparse(self):
+        lines, line = self._lineiter(), ''
+        separator = tob('--') + tob(self.boundary)
+        terminator = tob('--') + tob(self.boundary) + tob('--')
+        # Consume first boundary. Ignore leading blank lines
+        for line, nl in lines:
+            if line: break
+        if line != separator:
+            raise MultipartError("Stream does not start with boundary")
+        # For each part in stream...
+        mem_used, disk_used = 0, 0 # Track used resources to prevent DoS
+        is_tail = False # True if the last line was incomplete (cut)
+        opts = {'buffer_size': self.buffer_size,
+                'memfile_limit': self.memfile_limit,
+                'charset': self.charset}
+        part = MultipartPart(**opts)
+        for line, nl in lines:
+            if line == terminator and not is_tail:
+                part.file.seek(0)
+                yield part
+                break
+            elif line == separator and not is_tail:
+                if part.is_buffered(): mem_used += part.size
+                else: disk_used += part.size
+                part.file.seek(0)
+                yield part
+                part = MultipartPart(**opts)
+            else:
+                is_tail = not nl # The next line continues this one
+                part.feed(line, nl)
+                if part.is_buffered():
+                    if part.size + mem_used > self.mem_limit:
+                        raise MultipartError("Memory limit reached.")
+                elif part.size + disk_used > self.disk_limit:
+                    raise MultipartError("Disk limit reached.")
+        if line != terminator:
+            raise MultipartError("Unexpected end of multipart stream.")
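The parser above can be exercised end to end with a hand-built message; the boundary, field name and payload below are invented, and `multipart` is the assumed import name:

    from io import BytesIO
    from multipart import MultipartParser

    body = (b'--frontier\r\n'
            b'Content-Disposition: form-data; name="greeting"\r\n'
            b'\r\n'
            b'hello\r\n'
            b'--frontier--\r\n')
    parser = MultipartParser(BytesIO(body), boundary='frontier',
                             content_length=len(body))
    part = parser.get('greeting')
    assert part.value == u'hello'   # decoded with the part's charset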
+
+
+class MultipartPart(object):
+
+    def __init__(self, buffer_size=2**16, memfile_limit=2**18, charset='latin1'):
+        self.headerlist = []
+        self.headers = None
+        self.file = False
+        self.size = 0
+        self._buf = tob('')
+        self.disposition, self.name, self.filename = None, None, None
+        self.content_type, self.charset = None, charset
+        self.memfile_limit = memfile_limit
+        self.buffer_size = buffer_size
+
+    def feed(self, line, nl=''):
+        if self.file:
+            return self.write_body(line, nl)
+        return self.write_header(line, nl)
+
+    def write_header(self, line, nl):
+        line = line.decode(self.charset or 'latin1')
+        if not nl: raise MultipartError('Unexpected end of line in header.')
+        if not line.strip(): # blank line -> end of header segment
+            self.finish_header()
+        elif line[0] in ' \t' and self.headerlist:
+            name, value = self.headerlist.pop()
+            self.headerlist.append((name, value+line.strip()))
+        else:
+            if ':' not in line:
+                raise MultipartError("Syntax error in header: No colon.")
+            name, value = line.split(':', 1)
+            self.headerlist.append((name.strip(), value.strip()))
+
+    def write_body(self, line, nl):
+        if not line and not nl: return # This does not even flush the buffer
+        self.size += len(line) + len(self._buf)
+        self.file.write(self._buf + line)
+        self._buf = nl
+        if self.content_length > 0 and self.size > self.content_length:
+            raise MultipartError('Size of body exceeds Content-Length header.')
+        if self.size > self.memfile_limit and isinstance(self.file, BytesIO):
+            # TODO: What about non-file uploads that exceed the memfile_limit?
+            self.file, old = TemporaryFile(mode='w+b'), self.file
+            old.seek(0)
+            copy_file(old, self.file, self.size, self.buffer_size)
+
+    def finish_header(self):
+        self.file = BytesIO()
+        self.headers = Headers(self.headerlist)
+        cdis = self.headers.get('Content-Disposition','')
+        ctype = self.headers.get('Content-Type','')
+        clen = self.headers.get('Content-Length','-1')
+        if not cdis:
+            raise MultipartError('Content-Disposition header is missing.')
+        self.disposition, self.options = parse_options_header(cdis)
+        self.name = self.options.get('name')
+        self.filename = self.options.get('filename')
+        self.content_type, options = parse_options_header(ctype)
+        self.charset = options.get('charset') or self.charset
+        self.content_length = int(self.headers.get('Content-Length','-1'))
+
+    def is_buffered(self):
+        ''' Return true if the data is fully buffered in memory.'''
+        return isinstance(self.file, BytesIO)
+
+    @property
+    def value(self):
+        ''' Data decoded with the specified charset '''
+        pos = self.file.tell()
+        self.file.seek(0)
+        val = self.file.read()
+        self.file.seek(pos)
+        return val.decode(self.charset)
+
+    def save_as(self, path):
+        fp = open(path, 'wb')
+        pos = self.file.tell()
+        try:
+            self.file.seek(0)
+            size = copy_file(self.file, fp)
+        finally:
+            self.file.seek(pos)
+            fp.close()
+        return size
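The spill-to-disk path in write_body is easiest to see with an artificially small memfile_limit; the header and payload below are invented:

    from multipart import MultipartPart, tob

    part = MultipartPart(memfile_limit=10)
    part.feed(tob('Content-Disposition: form-data; name="f"'), tob('\r\n'))
    part.feed(tob(''), tob('\r\n'))     # blank line: headers are finished
    part.feed(tob('0123456789ABCDEF'), tob('\r\n'))
    # 16 bytes exceed the 10-byte limit: data now lives in a TemporaryFile
    assert not part.is_buffered()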
+    '''
+
+    forms, files = MultiDict(), MultiDict()
+    try:
+        if environ.get('REQUEST_METHOD','GET').upper() not in ('POST', 'PUT'):
+            raise MultipartError("Request method other than POST or PUT.")
+        content_length = int(environ.get('CONTENT_LENGTH', '-1'))
+        content_type = environ.get('CONTENT_TYPE', '')
+        if not content_type:
+            raise MultipartError("Missing Content-Type header.")
+        content_type, options = parse_options_header(content_type)
+        stream = environ.get('wsgi.input') or BytesIO()
+        kw['charset'] = charset = options.get('charset', charset)
+        if content_type == 'multipart/form-data':
+            boundary = options.get('boundary','')
+            if not boundary:
+                raise MultipartError("No boundary for multipart/form-data.")
+            for part in MultipartParser(stream, boundary, content_length, **kw):
+                if part.filename or not part.is_buffered():
+                    files[part.name] = part
+                else: # TODO: Big form-fields are in the files dict. really?
+                    forms[part.name] = part.value
+        elif content_type in ('application/x-www-form-urlencoded',
+                              'application/x-url-encoded'):
+            mem_limit = kw.get('mem_limit', 2**20)
+            if content_length > mem_limit:
+                raise MultipartError("Request too big. Increase MAXMEM.")
+            data = stream.read(mem_limit)
+            if stream.read(1): # There is more data than fits into mem_limit
+                raise MultipartError("Request too big. Increase MAXMEM.")
+            data = parse_qs(data, keep_blank_values=True)
+            for key, values in data.iteritems():
+                for value in values:
+                    forms[key] = value.decode(charset)
+        else:
+            raise MultipartError("Unsupported content type.")
+    except MultipartError:
+        if strict: raise
+    return forms, files
+
diff -r 84738d495ffd -r 793377697c81 predicates.py
--- a/predicates.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/predicates.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -706,6 +706,22 @@
 # entity predicates #############################################################
 
+class composite_etype(Predicate):
+    """Return 1 for composite entities.
+
+    A composite entity has an etype for which at least one relation
+    definition points in its direction with the
+    composite='subject'/'object' notation.
+    """
+
+    def __call__(self, cls, req, **kwargs):
+        entity = kwargs.pop('entity', None)
+        if entity is None:
+            return 0
+        return entity.e_schema.is_composite
+
+
+
 class non_final_entity(EClassPredicate):
     """Return 1 for entity of a non final entity type(s). Remember, "final"
     entity types are String, Int, etc... This is equivalent to
@@ -1273,7 +1289,7 @@
 
 @objectify_predicate
 def authenticated_user(cls, req, **kwargs):
-    """Return 1 if the user is authenticated (e.g. not the anonymous user).
+    """Return 1 if the user is authenticated (i.e. not the anonymous user).
 
     May only be used on the web side, not on the data repository side.
     """
@@ -1284,7 +1300,7 @@
 
 # XXX == ~ authenticated_user()
 def anonymous_user():
-    """Return 1 if the user is not authenticated (e.g. is the anonymous user).
+    """Return 1 if the user is not authenticated (i.e. is the anonymous user).
 
     May only be used on the web side, not on the data repository side.
     """
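Aside from the diff itself: `parse_form_data` above is the single entry point the web layer needs for this parser. A minimal sketch of calling it from a bare WSGI application; the `app` callable and the 'attachment' field name are illustrative, not part of the patch:

    def app(environ, start_response):
        # forms holds plain fields as unicode strings; files holds
        # MultipartPart instances (file uploads, or fields spooled to a
        # temporary file once they exceed memfile_limit)
        forms, files = parse_form_data(environ, charset='utf8', strict=True)
        part = files.get('attachment')  # hypothetical field name
        if part is not None:
            part.save_as('/tmp/' + part.filename)
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['ok']

With strict=False (the default), parsing errors yield empty dictionaries instead of raising MultipartError.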
""" diff -r 84738d495ffd -r 793377697c81 pytestconf.py --- a/pytestconf.py Wed Sep 24 17:35:59 2014 +0200 +++ b/pytestconf.py Wed Sep 24 18:04:30 2014 +0200 @@ -43,6 +43,6 @@ if not cls.repo.shutting_down: cls.repo.shutdown() del cls.repo - for clsattr in ('cnx', '_orig_cnx', 'config', '_config', 'vreg', 'schema'): + for clsattr in ('cnx', 'config', '_config', 'vreg', 'schema'): if clsattr in cls.__dict__: delattr(cls, clsattr) diff -r 84738d495ffd -r 793377697c81 repoapi.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/repoapi.py Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,400 @@ +# copyright 2013-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Official API to access the content of a repository +""" +from logilab.common.deprecation import deprecated + +from cubicweb.utils import parse_repo_uri +from cubicweb import ConnectionError, ProgrammingError, AuthenticationError +from uuid import uuid4 +from contextlib import contextmanager +from cubicweb.req import RequestSessionBase +from functools import wraps + +### private function for specific method ############################ + +def _get_inmemory_repo(config, vreg=None): + from cubicweb.server.repository import Repository + from cubicweb.server.utils import TasksManager + return Repository(config, TasksManager(), vreg=vreg) + + +### public API ###################################################### + +def get_repository(uri=None, config=None, vreg=None): + """get a repository for the given URI or config/vregistry (in case we're + loading the repository for a client, eg web server, configuration). + + The returned repository may be an in-memory repository or a proxy object + using a specific RPC method, depending on the given URI (pyro or zmq). + """ + if uri is None: + return _get_inmemory_repo(config, vreg) + + protocol, hostport, appid = parse_repo_uri(uri) + + if protocol == 'inmemory': + # me may have been called with a dummy 'inmemory://' uri ... + return _get_inmemory_repo(config, vreg) + + if protocol == 'pyroloc': # direct connection to the instance + from logilab.common.pyro_ext import get_proxy + uri = uri.replace('pyroloc', 'PYRO') + return get_proxy(uri) + + if protocol == 'pyro': # connection mediated through the pyro ns + from logilab.common.pyro_ext import ns_get_proxy + path = appid.strip('/') + if not path: + raise ConnectionError( + "can't find instance name in %s (expected to be the path component)" + % uri) + if '.' 
+            nsgroup, nsid = path.rsplit('.', 1)
+        else:
+            nsgroup = 'cubicweb'
+            nsid = path
+        return ns_get_proxy(nsid, defaultnsgroup=nsgroup, nshost=hostport)
+
+    if protocol.startswith('zmqpickle-'):
+        from cubicweb.zmqclient import ZMQRepositoryClient
+        return ZMQRepositoryClient(uri)
+    else:
+        raise ConnectionError('unknown protocol: `%s`' % protocol)
+
+def connect(repo, login, **kwargs):
+    """Take credentials and return the associated ClientConnection.
+
+    The ClientConnection is associated to a new Session object that will be
+    closed when the ClientConnection is closed.
+
+    raise AuthenticationError if the credentials are invalid."""
+    sessionid = repo.connect(login, **kwargs)
+    session = repo._get_session(sessionid)
+    # XXX the autoclose_session should probably be handled on the session
+    # directly; this is something to consider once we have proper server side
+    # Connection.
+    return ClientConnection(session, autoclose_session=True)
+
+def anonymous_cnx(repo):
+    """return a ClientConnection for the anonymous user.
+
+    The ClientConnection is associated to a new Session object that will be
+    closed when the ClientConnection is closed.
+
+    raises an AuthenticationError if anonymous usage is not allowed
+    """
+    anoninfo = getattr(repo.config, 'anonymous_user', lambda: None)()
+    if anoninfo is None: # no anonymous user
+        raise AuthenticationError('anonymous access is not authorized')
+    anon_login, anon_password = anoninfo
+    # use vreg's repository cache
+    return connect(repo, anon_login, password=anon_password)
+
+def _srv_cnx_func(name):
+    """Build a ClientConnection method that blindly forwards to the underlying
+    Connection.
+
+    This serves a transitional purpose only and will be dropped once we have
+    standalone connections."""
+    def proxy(clt_cnx, *args, **kwargs):
+        # the ``with`` dance is transitional. We do not have Standalone
+        # Connection yet so we use this trick to ensure the session has the
+        # proper cnx loaded. This can be simplified once we have a Standalone
+        # Connection object
+        if not clt_cnx._open:
+            raise ProgrammingError('Closed client connection')
+        return getattr(clt_cnx._cnx, name)(*args, **kwargs)
+    return proxy
+
+def _open_only(func):
+    """decorator for ClientConnection methods that checks the connection is open"""
+    @wraps(func)
+    def check_open(clt_cnx, *args, **kwargs):
+        if not clt_cnx._open:
+            raise ProgrammingError('Closed client connection')
+        return func(clt_cnx, *args, **kwargs)
+    return check_open
+
+
+class ClientConnection(RequestSessionBase):
+    """A Connection object to be used client side.
+
+    This object is meant to be used client side (with potential communication
+    with the repo through RPC) and offers some compatibility with the
+    cubicweb.dbapi.Connection interface.
+
+    The autoclose_session parameter informs the connection that this session
+    has been opened explicitly and only for this client connection. The
+    connection will close the session on exit.
+    """
+    # make exceptions available through the connection object
+    ProgrammingError = ProgrammingError
+    # attributes that may be overridden per connection instance
+    anonymous_connection = False # XXX really needed ?
+    is_repo_in_memory = True # BC, always true
+
+    def __init__(self, session, autoclose_session=False):
+        super(ClientConnection, self).__init__(session.vreg)
+        self._session = session # XXX there is no real reason to keep the
+                                # session around; functions still using it
+                                # should be rewritten and migrated.
+        self._cnx = None
+        self._open = None
+        self._web_request = False
+        #: cache entities built during the connection
+        self._eid_cache = {}
+        self._set_user(session.user)
+        self._autoclose_session = autoclose_session
+
+    def __enter__(self):
+        assert self._open is None
+        self._open = True
+        self._cnx = self._session.new_cnx()
+        self._cnx.__enter__()
+        self._cnx.ctx_count += 1
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self._open = False
+        self._cnx.ctx_count -= 1
+        self._cnx.__exit__(exc_type, exc_val, exc_tb)
+        self._cnx = None
+        if self._autoclose_session:
+            # we have to call repo.close to ensure the repo properly forgets the
+            # session; calling session.close() is not enough :-(
+            self._session.repo.close(self._session.sessionid)
+
+
+    # begin silly BC
+    @property
+    def _closed(self):
+        return not self._open
+
+    def close(self):
+        if self._open:
+            self.__exit__(None, None, None)
+
+    def __repr__(self):
+        # XXX we probably want to reference the user of the session here
+        if self._open is None:
+            return '<ClientConnection (not open yet)>'
+        elif not self._open:
+            return '<ClientConnection (closed)>'
+        elif self.anonymous_connection:
+            return '<ClientConnection %s (anonymous)>' % self._cnx.connectionid
+        else:
+            return '<ClientConnection %s>' % self._cnx.connectionid
+    # end silly BC
+
+    # Main Connection purpose in life #########################################
+
+    call_service = _srv_cnx_func('call_service')
+
+    @_open_only
+    def execute(self, *args, **kwargs):
+        # the ``with`` dance is transitional. We do not have Standalone
+        # Connection yet so we use this trick to ensure the session has the
+        # proper cnx loaded. This can be simplified once we have a Standalone
+        # Connection object
+        rset = self._cnx.execute(*args, **kwargs)
+        rset.req = self
+        # XXX keep the same behavior as the old dbapi
+        # otherwise multiple tests break.
+        # The little internet kitten is very sad about this situation.
+        rset._rqlst = None
+        return rset
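At this point the client-side API is complete enough to use. A minimal connection sketch, assuming an already-created instance; the appid and credentials below are placeholders:

    from cubicweb.cwconfig import CubicWebConfiguration
    from cubicweb import repoapi

    config = CubicWebConfiguration.config_for('myinstance')  # placeholder appid
    repo = repoapi.get_repository(config=config)
    # connect() opens a repository session and wraps it in a ClientConnection;
    # used as a context manager, __exit__ closes the underlying cnx (and the
    # session, since connect() sets autoclose_session=True)
    with repoapi.connect(repo, 'admin', password='admin') as cnx:
        rset = cnx.execute('Any U WHERE U is CWUser')
        cnx.commit()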
+
+    @_open_only
+    def commit(self, *args, **kwargs):
+        try:
+            return self._cnx.commit(*args, **kwargs)
+        finally:
+            self.drop_entity_cache()
+
+    @_open_only
+    def rollback(self, *args, **kwargs):
+        try:
+            return self._cnx.rollback(*args, **kwargs)
+        finally:
+            self.drop_entity_cache()
+
+    # security #################################################################
+
+    allow_all_hooks_but = _srv_cnx_func('allow_all_hooks_but')
+    deny_all_hooks_but = _srv_cnx_func('deny_all_hooks_but')
+    security_enabled = _srv_cnx_func('security_enabled')
+
+    # direct sql ###############################################################
+
+    system_sql = _srv_cnx_func('system_sql')
+
+    # session data methods #####################################################
+
+    get_shared_data = _srv_cnx_func('get_shared_data')
+    set_shared_data = _srv_cnx_func('set_shared_data')
+
+    @property
+    def transaction_data(self):
+        return self._cnx.transaction_data
+
+    # meta-data accessors ######################################################
+
+    @_open_only
+    def source_defs(self):
+        """Return the definition of sources used by the repository."""
+        return self._session.repo.source_defs()
+
+    @_open_only
+    def get_schema(self):
+        """Return the schema currently used by the repository."""
+        return self._session.repo.get_schema()
+
+    @_open_only
+    def get_option_value(self, option):
+        """Return the value for `option` in the configuration."""
+        return self._session.repo.get_option_value(option)
+
+    entity_metas = _srv_cnx_func('entity_metas')
+    describe = _srv_cnx_func('describe') # XXX deprecated in 3.19
+
+    # undo support ############################################################
+
+    @_open_only
+    def undoable_transactions(self, ueid=None, req=None, **actionfilters):
+        """Return a list of undoable transaction objects by the connection's
+        user, ordered by descending transaction time.
+
+        Managers may filter according to user (eid) who has done the transaction
+        using the `ueid` argument. Others will only see their own transactions.
+
+        Additional filtering capabilities are provided by using the following
+        named arguments:
+
+        * `etype` to get only transactions creating/updating/deleting entities
+          of the given type
+
+        * `eid` to get only transactions applied to entity of the given eid
+
+        * `action` to get only transactions doing the given action (action in
+          'C', 'U', 'D', 'A', 'R'). If `etype`, action can only be 'C', 'U' or
+          'D'.
+
+        * `public`: when additional filtering is provided, they are by default
+          only searched in 'public' actions, unless a `public` argument is given
+          and set to false.
+        """
+        # the ``with`` dance is transitional. We do not have Standalone
+        # Connection yet so we use this trick to ensure the session has the
+        # proper cnx loaded. This can be simplified once we have a Standalone
+        # Connection object
+        source = self._cnx.repo.system_source
+        txinfos = source.undoable_transactions(self._cnx, ueid, **actionfilters)
+        for txinfo in txinfos:
+            txinfo.req = req or self # XXX mostly wrong
+        return txinfos
+
+    @_open_only
+    def transaction_info(self, txuuid, req=None):
+        """Return the transaction object for the given transaction uuid.
+
+        raise `NoSuchTransaction` if not found or if session's user is not
+        allowed (eg not in managers group and the transaction doesn't belong to
+        him).
+        """
+        # the ``with`` dance is transitional. We do not have Standalone
+        # Connection yet so we use this trick to ensure the session has the
+        # proper cnx loaded. This can be simplified once we have a Standalone
+        # Connection object
+        txinfo = self._cnx.repo.system_source.tx_info(self._cnx, txuuid)
+        if req:
+            txinfo.req = req
+        else:
+            txinfo.cnx = self
+        return txinfo
+
+    @_open_only
+    def transaction_actions(self, txuuid, public=True):
+        """Return an ordered list of actions performed during that transaction.
+
+        If public is true, return only 'public' actions, i.e. not ones triggered
+        under the cover by hooks, else return all actions.
+
+        raise `NoSuchTransaction` if the transaction is not found or if
+        session's user is not allowed (eg not in managers group and the
+        transaction doesn't belong to him).
+        """
+        # the ``with`` dance is transitional. We do not have Standalone
+        # Connection yet so we use this trick to ensure the session has the
+        # proper cnx loaded. This can be simplified once we have a Standalone
+        # Connection object
+        return self._cnx.repo.system_source.tx_actions(self._cnx, txuuid, public)
+
+    @_open_only
+    def undo_transaction(self, txuuid):
+        """Undo the given transaction. Return potential restoration errors.
+
+        raise `NoSuchTransaction` if not found or if session's user is not
+        allowed (eg not in managers group and the transaction doesn't belong to
+        him).
+        """
+        # the ``with`` dance is transitional. We do not have Standalone
+        # Connection yet so we use this trick to ensure the session has the
+        # proper cnx loaded. This can be simplified once we have a Standalone
+        # Connection object
+        return self._cnx.repo.system_source.undo_transaction(self._cnx, txuuid)
+
+    # cache management
+
+    def entity_cache(self, eid):
+        return self._eid_cache[eid]
+
+    def set_entity_cache(self, entity):
+        self._eid_cache[entity.eid] = entity
+
+    def cached_entities(self):
+        return self._eid_cache.values()
+
+    def drop_entity_cache(self, eid=None):
+        if eid is None:
+            self._eid_cache = {}
+        else:
+            del self._eid_cache[eid]
+
+    # deprecated stuff
+
+    @deprecated('[3.19] This is a repoapi.ClientConnection object not a dbapi one')
+    def request(self):
+        return self
+
+    @deprecated('[3.19] This is a repoapi.ClientConnection object not a dbapi one')
+    def cursor(self):
+        return self
+
+    @property
+    @deprecated('[3.19] This is a repoapi.ClientConnection object not a dbapi one')
+    def sessionid(self):
+        return self._session.sessionid
+
+    @property
+    @deprecated('[3.19] This is a repoapi.ClientConnection object not a dbapi one')
+    def connection(self):
+        return self
+
+    @property
+    @deprecated('[3.19] This is a repoapi.ClientConnection object not a dbapi one')
+    def _repo(self):
+        return self._session.repo
diff -r 84738d495ffd -r 793377697c81 req.py
--- a/req.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/req.py	Wed Sep 24 18:04:30 2014 +0200
@@ -75,6 +75,23 @@
         self.local_perm_cache = {}
         self._ = unicode
 
+    def _set_user(self, orig_user):
+        """set the user for this req_session_base
+
+        A special method is needed to ensure the user is linked to the
+        connection too.
+        """
+        # cnx validity is checked by the call to .user_info
+        rset = self.eid_rset(orig_user.eid, 'CWUser')
+        user_cls = self.vreg['etypes'].etype_class('CWUser')
+        user = user_cls(self, rset, row=0, groups=orig_user.groups,
+                        properties=orig_user.properties)
+        user.cw_attr_cache['login'] = orig_user.login # cache login
+        self.user = user
+        self.set_entity_cache(user)
+        self.set_language(user.prefered_language())
+
+
     def set_language(self, lang):
         """install i18n configuration for `lang` translation.
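The recurring `describe` → `entity_metas` substitution seen throughout this changeset follows a single pattern; a migration sketch for cube code, where `req` stands for any request or connection object:

    # before 3.19: positional access on the tuple returned by describe()
    etype = req.describe(eid)[0]

    # from 3.19 on: key access on the metadata dict (describe() is deprecated)
    etype = req.entity_metas(eid)['type']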
@@ -86,7 +103,7 @@
         self._ = self.__ = gettext
         self.pgettext = pgettext
 
-    def get_option_value(self, option, foreid=None):
+    def get_option_value(self, option):
         raise NotImplementedError
 
     def property_value(self, key):
@@ -94,7 +111,9 @@
         user specific value if any, else using site value
         """
         if self.user:
-            return self.user.property_value(key)
+            val = self.user.property_value(key)
+            if val is not None:
+                return val
         return self.vreg.property_value(key)
 
     def etype_rset(self, etype, size=1):
@@ -114,7 +133,7 @@
         """
         eid = int(eid)
         if etype is None:
-            etype = self.describe(eid)[0]
+            etype = self.entity_metas(eid)['type']
         rset = ResultSet([(eid,)], 'Any X WHERE X eid %(x)s', {'x': eid},
                          [(etype,)])
         rset.req = self
@@ -188,7 +207,7 @@
         """
         parts = ['Any X WHERE X is %s' % etype]
         varmaker = rqlvar_maker(defined='X')
-        eschema = self.vreg.schema[etype]
+        eschema = self.vreg.schema.eschema(etype)
         for attr, value in kwargs.items():
             if isinstance(value, list) or isinstance(value, tuple):
                 raise NotImplementedError("List of values are not supported")
@@ -224,6 +243,11 @@
         - cubes.blog.mycache
         - etc.
         """
+        warn('[3.19] .get_cache will disappear soon. '
+             'Distributed caching mechanisms are being introduced instead. '
+             'Other caching mechanisms can be used more reliably '
+             'to the same effect.',
+             DeprecationWarning)
         if cachename in CACHE_REGISTRY:
             cache = CACHE_REGISTRY[cachename]
         else:
@@ -253,24 +277,20 @@
         """
         # use *args since we don't want first argument to be "anonymous" to
         # avoid potential clash with kwargs
+        method = None
         if args:
             assert len(args) == 1, 'only 0 or 1 non-named-argument expected'
             method = args[0]
-        else:
-            method = None
+        if method is None:
+            method = 'view'
         # XXX I (adim) think that if method is passed explicitly, we should
         # not try to process it and directly call req.build_url()
-        if method is None:
-            if self.from_controller() == 'view' and not '_restpath' in kwargs:
-                method = self.relative_path(includeparams=False) or 'view'
-            else:
-                method = 'view'
         base_url = kwargs.pop('base_url', None)
         if base_url is None:
             secure = kwargs.pop('__secure__', None)
             base_url = self.base_url(secure=secure)
         if '_restpath' in kwargs:
-            assert method == 'view', method
+            assert method == 'view', repr(method)
             path = kwargs.pop('_restpath')
         else:
             path = method
diff -r 84738d495ffd -r 793377697c81 rset.py
--- a/rset.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/rset.py	Wed Sep 24 18:04:30 2014 +0200
@@ -112,9 +112,6 @@
         """returns the result set's size"""
         return self.rowcount
 
-    def __nonzero__(self):
-        return self.rowcount
-
     def __getitem__(self, i):
         """returns the ith element of the result set"""
         return self.rows[i] #ResultSetRow(self.rows[i])
@@ -544,7 +541,8 @@
                 else:
                     attr_cols[attr] = i
             else:
-                rdef = eschema.rdef(attr, role)
+                # XXX takefirst=True to remove warning triggered by ambiguous relations
+                rdef = eschema.rdef(attr, role, takefirst=True)
                 # only keep value if it can't be multivalued
                 if rdef.role_cardinality(role) in '1?':
                     rel_cols[(attr, role)] = i
diff -r 84738d495ffd -r 793377697c81 schema.py
--- a/schema.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/schema.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -67,10 +67,13 @@ 'allowed_transition', 'destination_state', 'from_state', 'to_state', 'condition', 'subworkflow', 'subworkflow_state', 'subworkflow_exit', + 'by_transition', )) SYSTEM_RTYPES = set(('in_group', 'require_group', # cwproperty 'for_user', + 'cw_schema', 'cw_import_of', 'cw_for_source', + 'cw_host_config_of', )) | WORKFLOW_RTYPES NO_I18NCONTEXT = META_RTYPES | WORKFLOW_RTYPES @@ -559,7 +562,7 @@ PermissionMixIn.set_action_permissions = set_action_permissions def has_local_role(self, action): - """return true if the action *may* be granted localy (eg either rql + """return true if the action *may* be granted locally (eg either rql expressions or the owners group are used in security definition) XXX this method is only there since we don't know well how to deal with @@ -581,7 +584,7 @@ PermissionMixIn.may_have_permission = may_have_permission def has_perm(self, _cw, action, **kwargs): - """return true if the action is granted globaly or localy""" + """return true if the action is granted globally or locally""" try: self.check_perm(_cw, action, **kwargs) return True @@ -675,6 +678,34 @@ eid = getattr(edef, 'eid', None) self.eid = eid + def targets(self, role): + assert role in ('subject', 'object') + if role == 'subject': + return self.subjrels.values() + return self.objrels.values() + + @cachedproperty + def composite_rdef_roles(self): + """Return all relation definitions that define the current entity + type as a composite. + """ + rdef_roles = [] + for role in ('subject', 'object'): + for rschema in self.targets(role): + if rschema.final: + continue + for rdef in rschema.rdefs.values(): + if (role == 'subject' and rdef.subject == self) or \ + (role == 'object' and rdef.object == self): + crole = rdef.composite + if crole == role: + rdef_roles.append((rdef, role)) + return rdef_roles + + @cachedproperty + def is_composite(self): + return bool(len(self.composite_rdef_roles)) + def check_permission_definitions(self): super(CubicWebEntitySchema, self).check_permission_definitions() for groups in self.permissions.itervalues(): @@ -818,20 +849,20 @@ assert not ('fromeid' in kwargs or 'toeid' in kwargs), kwargs assert action in ('read', 'update') if 'eid' in kwargs: - subjtype = _cw.describe(kwargs['eid'])[0] + subjtype = _cw.entity_metas(kwargs['eid'])['type'] else: subjtype = objtype = None else: assert not 'eid' in kwargs, kwargs assert action in ('read', 'add', 'delete') if 'fromeid' in kwargs: - subjtype = _cw.describe(kwargs['fromeid'])[0] + subjtype = _cw.entity_metas(kwargs['fromeid'])['type'] elif 'frometype' in kwargs: subjtype = kwargs.pop('frometype') else: subjtype = None if 'toeid' in kwargs: - objtype = _cw.describe(kwargs['toeid'])[0] + objtype = _cw.entity_metas(kwargs['toeid'])['type'] elif 'toetype' in kwargs: objtype = kwargs.pop('toetype') else: diff -r 84738d495ffd -r 793377697c81 server/__init__.py --- a/server/__init__.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/__init__.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
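The `is_composite` / `composite_rdef_roles` pair added to the entity schema above is what the new `composite_etype` predicate builds on, and cube code can query it directly. A sketch assuming an already-loaded schema; the entity type name is illustrative:

    eschema = schema.eschema('BlogEntry')  # hypothetical etype
    if eschema.is_composite:
        for rdef, role in eschema.composite_rdef_roles:
            # each pair is a relation definition together with the role
            # ('subject' or 'object') this entity type plays as the composite
            print rdef, role

Both attributes are cached properties, so the relation-definition scan runs once per entity schema.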
@@ -31,8 +31,6 @@ from logilab.common.modutils import LazyObject from logilab.common.textutils import splitstrip from logilab.common.registry import yes -from logilab import database - from yams import BASE_GROUPS from cubicweb import CW_SOFTWARE_ROOT @@ -199,12 +197,13 @@ {'u': user.eid, 'group': group}) return user -def init_repository(config, interactive=True, drop=False, vreg=None): +def init_repository(config, interactive=True, drop=False, vreg=None, + init_config=None): """initialise a repository database by creating tables add filling them with the minimal set of entities (ie at least the schema, base groups and a initial user) """ - from cubicweb.dbapi import in_memory_repo_cnx + from cubicweb.repoapi import get_repository, connect from cubicweb.server.repository import Repository from cubicweb.server.utils import manager_userpasswd from cubicweb.server.sqlutils import sqlexec, sqlschema, sql_drop_all_user_tables @@ -217,8 +216,11 @@ config.cube_appobject_path = set(('hooks', 'entities')) # only enable the system source at initialization time repo = Repository(config, vreg=vreg) + if init_config is not None: + # further config initialization once it has been bootstrapped + init_config(config) schema = repo.schema - sourcescfg = config.sources() + sourcescfg = config.read_sources_file() source = sourcescfg['system'] driver = source['db-driver'] sqlcnx = repo.system_source.get_connection() @@ -257,49 +259,48 @@ sqlcursor.close() sqlcnx.commit() sqlcnx.close() - session = repo.internal_session() - # insert entity representing the system source - ssource = session.create_entity('CWSource', type=u'native', name=u'system') - repo.system_source.eid = ssource.eid - session.execute('SET X cw_source X WHERE X eid %(x)s', {'x': ssource.eid}) - # insert base groups and default admin - print '-> inserting default user and default groups.' - try: - login = unicode(sourcescfg['admin']['login']) - pwd = sourcescfg['admin']['password'] - except KeyError: - if interactive: - msg = 'enter login and password of the initial manager account' - login, pwd = manager_userpasswd(msg=msg, confirm=True) - else: - login, pwd = unicode(source['db-user']), source['db-password'] - # sort for eid predicatability as expected in some server tests - for group in sorted(BASE_GROUPS): - session.create_entity('CWGroup', name=unicode(group)) - admin = create_user(session, login, pwd, 'managers') - session.execute('SET X owned_by U WHERE X is IN (CWGroup,CWSource), U eid %(u)s', - {'u': admin.eid}) - session.commit() - session.close() + with repo.internal_cnx() as cnx: + # insert entity representing the system source + ssource = cnx.create_entity('CWSource', type=u'native', name=u'system') + repo.system_source.eid = ssource.eid + cnx.execute('SET X cw_source X WHERE X eid %(x)s', {'x': ssource.eid}) + # insert base groups and default admin + print '-> inserting default user and default groups.' 
+        try:
+            login = unicode(sourcescfg['admin']['login'])
+            pwd = sourcescfg['admin']['password']
+        except KeyError:
+            if interactive:
+                msg = 'enter login and password of the initial manager account'
+                login, pwd = manager_userpasswd(msg=msg, confirm=True)
+            else:
+                login, pwd = unicode(source['db-user']), source['db-password']
+        # sort for eid predictability as expected in some server tests
+        for group in sorted(BASE_GROUPS):
+            cnx.create_entity('CWGroup', name=unicode(group))
+        admin = create_user(cnx, login, pwd, 'managers')
+        cnx.execute('SET X owned_by U WHERE X is IN (CWGroup,CWSource), U eid %(u)s',
+                    {'u': admin.eid})
+        cnx.commit()
     repo.shutdown()
     # reloging using the admin user
     config._cubes = None # avoid assertion error
-    repo, cnx = in_memory_repo_cnx(config, login, password=pwd)
-    repo.system_source.eid = ssource.eid # redo this manually
-    assert len(repo.sources) == 1, repo.sources
-    handler = config.migration_handler(schema, interactive=False,
-                                       cnx=cnx, repo=repo)
-    # install additional driver specific sql files
-    handler.cmd_install_custom_sql_scripts()
-    for cube in reversed(config.cubes()):
-        handler.cmd_install_custom_sql_scripts(cube)
-    # serialize the schema
-    initialize_schema(config, schema, handler)
-    # yoo !
-    cnx.commit()
-    repo.system_source.init_creating()
-    cnx.commit()
-    cnx.close()
+    repo = get_repository(config=config)
+    with connect(repo, login, password=pwd) as cnx:
+        with cnx.security_enabled(False, False):
+            repo.system_source.eid = ssource.eid # redo this manually
+            handler = config.migration_handler(schema, interactive=False,
+                                               cnx=cnx, repo=repo)
+            # install additional driver specific sql files
+            handler.cmd_install_custom_sql_scripts()
+            for cube in reversed(config.cubes()):
+                handler.cmd_install_custom_sql_scripts(cube)
+            # serialize the schema
+            initialize_schema(config, schema, handler)
+            # yoo !
+            cnx.commit()
+            repo.system_source.init_creating()
+            cnx.commit()
     repo.shutdown()
     # restore initial configuration
     config.creating = False
@@ -312,13 +313,13 @@
 
 def initialize_schema(config, schema, mhandler, event='create'):
     from cubicweb.server.schemaserial import serialize_schema
-    session = mhandler.session
+    cnx = mhandler.cnx
     cubes = config.cubes()
     # deactivate every hooks but those responsible to set metadata
     # so, NO INTEGRITY CHECKS are done, to have quicker db creation.
     # Active integrity is kept else we may pb such as two default
     # workflows for one entity type.
- with session.deny_all_hooks_but('metadata', 'activeintegrity'): + with cnx.deny_all_hooks_but('metadata', 'activeintegrity'): # execute cubicweb's pre script mhandler.cmd_exec_event_script('pre%s' % event) # execute cubes pre script if any @@ -327,8 +328,7 @@ # execute instance's pre script (useful in tests) mhandler.cmd_exec_event_script('pre%s' % event, apphome=True) # enter instance'schema into the database - session.set_cnxset() - serialize_schema(session, schema) + serialize_schema(cnx, schema) # execute cubicweb's post script mhandler.cmd_exec_event_script('post%s' % event) # execute cubes'post script if any @@ -353,6 +353,4 @@ SOURCE_TYPES = {'native': LazyObject('cubicweb.server.sources.native', 'NativeSQLSource'), 'datafeed': LazyObject('cubicweb.server.sources.datafeed', 'DataFeedSource'), 'ldapfeed': LazyObject('cubicweb.server.sources.ldapfeed', 'LDAPFeedSource'), - 'pyrorql': LazyObject('cubicweb.server.sources.pyrorql', 'PyroRQLSource'), - 'zmqrql': LazyObject('cubicweb.server.sources.zmqrql', 'ZMQRQLSource'), } diff -r 84738d495ffd -r 793377697c81 server/checkintegrity.py --- a/server/checkintegrity.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/checkintegrity.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -35,29 +35,17 @@ sys.stderr.write(' [FIXED]') sys.stderr.write('\n') -def has_eid(session, sqlcursor, eid, eids): +def has_eid(cnx, sqlcursor, eid, eids): """return true if the eid is a valid eid""" if eid in eids: return eids[eid] - sqlcursor.execute('SELECT type, source FROM entities WHERE eid=%s' % eid) + sqlcursor.execute('SELECT type FROM entities WHERE eid=%s' % eid) try: - etype, source = sqlcursor.fetchone() + etype = sqlcursor.fetchone()[0] except Exception: eids[eid] = False return False - if source and source != 'system': - try: - # insert eid *and* etype to attempt checking entity has not been - # replaced by another subsquently to a restore of an old dump - if session.execute('Any X WHERE X is %s, X eid %%(x)s' % etype, - {'x': eid}): - eids[eid] = True - return True - except Exception: # TypeResolverError, Unauthorized... 
- pass - eids[eid] = False - return False - if etype not in session.vreg.schema: + if etype not in cnx.vreg.schema: eids[eid] = False return False sqlcursor.execute('SELECT * FROM %s%s WHERE %seid=%s' % (SQL_PREFIX, etype, @@ -94,16 +82,17 @@ else: yield eschema -def reindex_entities(schema, session, withpb=True, etypes=None): +def reindex_entities(schema, cnx, withpb=True, etypes=None): """reindex all entities in the repository""" # deactivate modification_date hook since we don't want them # to be updated due to the reindexation - repo = session.repo - cursor = session.cnxset['system'] - dbhelper = session.repo.system_source.dbhelper - if not dbhelper.has_fti_table(cursor): - print 'no text index table' - dbhelper.init_fti(cursor) + repo = cnx.repo + dbhelper = repo.system_source.dbhelper + with cnx.ensure_cnx_set: + cursor = cnx.cnxset.cu + if not dbhelper.has_fti_table(cursor): + print 'no text index table' + dbhelper.init_fti(cursor) repo.system_source.do_fti = True # ensure full-text indexation is activated if etypes is None: print 'Reindexing entities' @@ -117,15 +106,15 @@ for container in etype_fti_containers(eschema): etypes.add(container) # clear fti table first - session.system_sql('DELETE FROM %s' % dbhelper.fti_table) + cnx.system_sql('DELETE FROM %s' % dbhelper.fti_table) else: print 'Reindexing entities of type %s' % \ ', '.join(sorted(str(e) for e in etypes)) # clear fti table first. Use subquery for sql compatibility - session.system_sql("DELETE FROM %s WHERE EXISTS(SELECT 1 FROM ENTITIES " - "WHERE eid=%s AND type IN (%s))" % ( - dbhelper.fti_table, dbhelper.fti_uid_attr, - ','.join("'%s'" % etype for etype in etypes))) + cnx.system_sql("DELETE FROM %s WHERE EXISTS(SELECT 1 FROM ENTITIES " + "WHERE eid=%s AND type IN (%s))" % ( + dbhelper.fti_table, dbhelper.fti_uid_attr, + ','.join("'%s'" % etype for etype in etypes))) if withpb: pb = ProgressBar(len(etypes) + 1) pb.update() @@ -133,17 +122,19 @@ # attribute to their current value source = repo.system_source for eschema in etypes: - etype_class = session.vreg['etypes'].etype_class(str(eschema)) - for fti_rql in etype_class.cw_fti_index_rql_queries(session): - rset = session.execute(fti_rql) - source.fti_index_entities(session, rset.entities()) + etype_class = cnx.vreg['etypes'].etype_class(str(eschema)) + for fti_rql in etype_class.cw_fti_index_rql_queries(cnx): + rset = cnx.execute(fti_rql) + source.fti_index_entities(cnx, rset.entities()) # clear entity cache to avoid high memory consumption on big tables - session.drop_entity_cache() + cnx.drop_entity_cache() if withpb: pb.update() + if withpb: + pb.finish() -def check_schema(schema, session, eids, fix=1): +def check_schema(schema, cnx, eids, fix=1): """check serialized schema""" print 'Checking serialized schema' unique_constraints = ('SizeConstraint', 'FormatConstraint', @@ -153,7 +144,7 @@ 'WHERE X is CWConstraint, R constrained_by X, ' 'R relation_type RT, RT name RN, R from_entity ST, ST name SN, ' 'R to_entity OT, OT name ON, X cstrtype CT, CT name CTN') - for count, rn, sn, on, cstrname in session.execute(rql): + for count, rn, sn, on, cstrname in cnx.execute(rql): if count == 1: continue if cstrname in unique_constraints: @@ -164,37 +155,38 @@ -def check_text_index(schema, session, eids, fix=1): +def check_text_index(schema, cnx, eids, fix=1): """check all entities registered in the text index""" print 'Checking text index' msg = ' Entity with eid %s exists in the text index but in no source (autofix will remove from text index)' - cursor = 
session.system_sql('SELECT uid FROM appears;') + cursor = cnx.system_sql('SELECT uid FROM appears;') for row in cursor.fetchall(): eid = row[0] - if not has_eid(session, cursor, eid, eids): + if not has_eid(cnx, cursor, eid, eids): sys.stderr.write(msg % eid) if fix: - session.system_sql('DELETE FROM appears WHERE uid=%s;' % eid) + cnx.system_sql('DELETE FROM appears WHERE uid=%s;' % eid) notify_fixed(fix) -def check_entities(schema, session, eids, fix=1): +def check_entities(schema, cnx, eids, fix=1): """check all entities registered in the repo system table""" print 'Checking entities system table' # system table but no source msg = ' Entity %s with eid %s exists in the system table but in no source (autofix will delete the entity)' - cursor = session.system_sql('SELECT eid,type FROM entities;') + cursor = cnx.system_sql('SELECT eid,type FROM entities;') for row in cursor.fetchall(): eid, etype = row - if not has_eid(session, cursor, eid, eids): + if not has_eid(cnx, cursor, eid, eids): sys.stderr.write(msg % (etype, eid)) if fix: - session.system_sql('DELETE FROM entities WHERE eid=%s;' % eid) + cnx.system_sql('DELETE FROM entities WHERE eid=%s;' % eid) notify_fixed(fix) # source in entities, but no relation cw_source - applcwversion = session.repo.get_versions().get('cubicweb') - if applcwversion >= (3,13,1): # entities.asource appeared in 3.13.1 - cursor = session.system_sql('SELECT e.eid FROM entities as e, cw_CWSource as s ' + # XXX this (get_versions) requires a second connection to the db when we already have one open + applcwversion = cnx.repo.get_versions().get('cubicweb') + if applcwversion >= (3, 13, 1): # entities.asource appeared in 3.13.1 + cursor = cnx.system_sql('SELECT e.eid FROM entities as e, cw_CWSource as s ' 'WHERE s.cw_name=e.asource AND ' 'NOT EXISTS(SELECT 1 FROM cw_source_relation as cs ' ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid) ' @@ -204,35 +196,35 @@ for row in cursor.fetchall(): sys.stderr.write(msg % row[0]) if fix: - session.system_sql('INSERT INTO cw_source_relation (eid_from, eid_to) ' + cnx.system_sql('INSERT INTO cw_source_relation (eid_from, eid_to) ' 'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWSource as s ' 'WHERE s.cw_name=e.asource AND NOT EXISTS(SELECT 1 FROM cw_source_relation as cs ' ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)') notify_fixed(True) # inconsistencies for 'is' msg = ' %s #%s is missing relation "is" (autofix will create the relation)\n' - cursor = session.system_sql('SELECT e.type, e.eid FROM entities as e, cw_CWEType as s ' + cursor = cnx.system_sql('SELECT e.type, e.eid FROM entities as e, cw_CWEType as s ' 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_relation as cs ' ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid) ' 'ORDER BY e.eid') for row in cursor.fetchall(): sys.stderr.write(msg % row) if fix: - session.system_sql('INSERT INTO is_relation (eid_from, eid_to) ' + cnx.system_sql('INSERT INTO is_relation (eid_from, eid_to) ' 'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWEType as s ' 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_relation as cs ' ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)') notify_fixed(True) # inconsistencies for 'is_instance_of' msg = ' %s #%s is missing relation "is_instance_of" (autofix will create the relation)\n' - cursor = session.system_sql('SELECT e.type, e.eid FROM entities as e, cw_CWEType as s ' + cursor = cnx.system_sql('SELECT e.type, e.eid FROM entities as e, cw_CWEType as s ' 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM 
is_instance_of_relation as cs ' ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid) ' 'ORDER BY e.eid') for row in cursor.fetchall(): sys.stderr.write(msg % row) if fix: - session.system_sql('INSERT INTO is_instance_of_relation (eid_from, eid_to) ' + cnx.system_sql('INSERT INTO is_instance_of_relation (eid_from, eid_to) ' 'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWEType as s ' 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_instance_of_relation as cs ' ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)') @@ -244,7 +236,7 @@ continue table = SQL_PREFIX + eschema.type column = SQL_PREFIX + 'eid' - cursor = session.system_sql('SELECT %s FROM %s;' % (column, table)) + cursor = cnx.system_sql('SELECT %s FROM %s;' % (column, table)) for row in cursor.fetchall(): eid = row[0] # eids is full since we have fetched everything from the entities table, @@ -252,7 +244,7 @@ if not eid in eids or not eids[eid]: sys.stderr.write(msg % (eid, eschema.type)) if fix: - session.system_sql('DELETE FROM %s WHERE %s=%s;' % (table, column, eid)) + cnx.system_sql('DELETE FROM %s WHERE %s=%s;' % (table, column, eid)) notify_fixed(fix) @@ -268,7 +260,7 @@ notify_fixed(fix) -def check_relations(schema, session, eids, fix=1): +def check_relations(schema, cnx, eids, fix=1): """check that eids referenced by relations are registered in the repo system table """ @@ -282,42 +274,42 @@ column = SQL_PREFIX + str(rschema) sql = 'SELECT cw_eid,%s FROM %s WHERE %s IS NOT NULL;' % ( column, table, column) - cursor = session.system_sql(sql) + cursor = cnx.system_sql(sql) for row in cursor.fetchall(): parent_eid, eid = row - if not has_eid(session, cursor, eid, eids): + if not has_eid(cnx, cursor, eid, eids): bad_inlined_msg(rschema, parent_eid, eid, fix) if fix: sql = 'UPDATE %s SET %s=NULL WHERE %s=%s;' % ( table, column, column, eid) - session.system_sql(sql) + cnx.system_sql(sql) continue try: - cursor = session.system_sql('SELECT eid_from FROM %s_relation;' % rschema) + cursor = cnx.system_sql('SELECT eid_from FROM %s_relation;' % rschema) except Exception as ex: # usually because table doesn't exist print 'ERROR', ex continue for row in cursor.fetchall(): eid = row[0] - if not has_eid(session, cursor, eid, eids): + if not has_eid(cnx, cursor, eid, eids): bad_related_msg(rschema, 'subject', eid, fix) if fix: sql = 'DELETE FROM %s_relation WHERE eid_from=%s;' % ( rschema, eid) - session.system_sql(sql) - cursor = session.system_sql('SELECT eid_to FROM %s_relation;' % rschema) + cnx.system_sql(sql) + cursor = cnx.system_sql('SELECT eid_to FROM %s_relation;' % rschema) for row in cursor.fetchall(): eid = row[0] - if not has_eid(session, cursor, eid, eids): + if not has_eid(cnx, cursor, eid, eids): bad_related_msg(rschema, 'object', eid, fix) if fix: sql = 'DELETE FROM %s_relation WHERE eid_to=%s;' % ( rschema, eid) - session.system_sql(sql) + cnx.system_sql(sql) -def check_mandatory_relations(schema, session, eids, fix=1): +def check_mandatory_relations(schema, cnx, eids, fix=1): """check entities missing some mandatory relation""" print 'Checking mandatory relations' msg = '%s #%s is missing mandatory %s relation %s (autofix will delete the entity)' @@ -337,7 +329,7 @@ rql = 'Any X WHERE NOT X %s Y, X is %s' % (rschema, etype) else: rql = 'Any X WHERE NOT Y %s X, X is %s' % (rschema, etype) - for entity in session.execute(rql).entities(): + for entity in cnx.execute(rql).entities(): sys.stderr.write(msg % (entity.cw_etype, entity.eid, role, rschema)) if fix: #if entity.cw_describe()['source']['uri'] == 'system': XXX 
@@ -345,7 +337,7 @@ notify_fixed(fix) -def check_mandatory_attributes(schema, session, eids, fix=1): +def check_mandatory_attributes(schema, cnx, eids, fix=1): """check for entities stored in the system source missing some mandatory attribute """ @@ -358,40 +350,40 @@ if rdef.cardinality[0] in '1+': rql = 'Any X WHERE X %s NULL, X is %s, X cw_source S, S name "system"' % ( rschema, rdef.subject) - for entity in session.execute(rql).entities(): + for entity in cnx.execute(rql).entities(): sys.stderr.write(msg % (entity.cw_etype, entity.eid, rschema)) if fix: entity.cw_delete() notify_fixed(fix) -def check_metadata(schema, session, eids, fix=1): +def check_metadata(schema, cnx, eids, fix=1): """check entities has required metadata FIXME: rewrite using RQL queries ? """ print 'Checking metadata' - cursor = session.system_sql("SELECT DISTINCT type FROM entities;") + cursor = cnx.system_sql("SELECT DISTINCT type FROM entities;") eidcolumn = SQL_PREFIX + 'eid' msg = ' %s with eid %s has no %s (autofix will set it to now)' for etype, in cursor.fetchall(): - if etype not in session.vreg.schema: + if etype not in cnx.vreg.schema: sys.stderr.write('entities table references unknown type %s\n' % etype) if fix: - session.system_sql("DELETE FROM entities WHERE type = %(type)s", + cnx.system_sql("DELETE FROM entities WHERE type = %(type)s", {'type': etype}) continue table = SQL_PREFIX + etype for rel, default in ( ('creation_date', datetime.now()), ('modification_date', datetime.now()), ): column = SQL_PREFIX + rel - cursor = session.system_sql("SELECT %s FROM %s WHERE %s is NULL" + cursor = cnx.system_sql("SELECT %s FROM %s WHERE %s is NULL" % (eidcolumn, table, column)) for eid, in cursor.fetchall(): sys.stderr.write(msg % (etype, eid, rel)) if fix: - session.system_sql("UPDATE %s SET %s=%%(v)s WHERE %s=%s ;" + cnx.system_sql("UPDATE %s SET %s=%%(v)s WHERE %s=%s ;" % (table, column, eidcolumn, eid), {'v': default}) notify_fixed(fix) @@ -402,22 +394,22 @@ using given user and password to locally connect to the repository (no running cubicweb server needed) """ - session = repo._get_session(cnx.sessionid, setcnxset=True) # yo, launch checks if checks: eids_cache = {} - with session.security_enabled(read=False, write=False): # ensure no read security + with cnx.security_enabled(read=False, write=False): # ensure no read security for check in checks: check_func = globals()['check_%s' % check] - check_func(repo.schema, session, eids_cache, fix=fix) + with cnx.ensure_cnx_set: + check_func(repo.schema, cnx, eids_cache, fix=fix) if fix: - session.commit() + cnx.commit() else: print if not fix: print 'WARNING: Diagnostic run, nothing has been corrected' if reindex: - session.rollback() - session.set_cnxset() - reindex_entities(repo.schema, session, withpb=withpb) - session.commit() + cnx.rollback() + with cnx.ensure_cnx_set: + reindex_entities(repo.schema, cnx, withpb=withpb) + cnx.commit() diff -r 84738d495ffd -r 793377697c81 server/cwzmq.py --- a/server/cwzmq.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/cwzmq.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2012-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -17,17 +17,17 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
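The integrity checker above now runs off a plain connection instead of a repository session. A sketch of a maintenance-script invocation, assuming `repo` and `cnx` were obtained through repoapi as shown earlier; the check names mirror the `check_*` functions defined in this file:

    from cubicweb.server.checkintegrity import check

    # fix=0 runs in diagnostic mode; fix=1 lets each check_* function
    # apply its autofix, committing at the end
    check(repo, cnx,
          checks=('entities', 'relations', 'mandatory_relations',
                  'mandatory_attributes', 'metadata', 'schema', 'text_index'),
          reindex=False, fix=0, withpb=False)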
-from threading import Thread import cPickle import traceback +from threading import Thread +from logging import getLogger import zmq from zmq.eventloop import ioloop import zmq.eventloop.zmqstream -from logging import getLogger from cubicweb import set_log_methods -from cubicweb.server.server import QuitEvent +from cubicweb.server.server import QuitEvent, Finished ctx = zmq.Context() diff -r 84738d495ffd -r 793377697c81 server/hook.py --- a/server/hook.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/hook.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -203,8 +203,8 @@ instance if you want to disable some integrity checking hook. This can be controlled more finely through the `category` class attribute, which is a string giving a category name. One can then uses the -:meth:`~cubicweb.server.session.Session.deny_all_hooks_but` and -:meth:`~cubicweb.server.session.Session.allow_all_hooks_but` context managers to +:meth:`~cubicweb.server.session.Connection.deny_all_hooks_but` and +:meth:`~cubicweb.server.session.Connection.allow_all_hooks_but` context managers to explicitly enable or disable some categories. The existing categories are: @@ -257,8 +257,8 @@ from logilab.common.decorators import classproperty, cached from logilab.common.deprecation import deprecated, class_renamed from logilab.common.logging_ext import set_log_methods -from logilab.common.registry import (Predicate, NotPredicate, OrPredicate, - objectify_predicate, yes) +from logilab.common.registry import (NotPredicate, OrPredicate, + objectify_predicate) from cubicweb import RegistryNotFound, server from cubicweb.cwvreg import CWRegistry, CWRegistryStore @@ -295,13 +295,13 @@ obj.check_events() super(HooksRegistry, self).register(obj, **kwargs) - def call_hooks(self, event, session=None, **kwargs): + def call_hooks(self, event, cnx=None, **kwargs): """call `event` hooks for an entity or a list of entities (passed respectively as the `entity` or ``entities`` keyword argument). 
""" kwargs['event'] = event - if session is None: # True for events such as server_start - for hook in sorted(self.possible_objects(session, **kwargs), + if cnx is None: # True for events such as server_start + for hook in sorted(self.possible_objects(cnx, **kwargs), key=lambda x: x.order): hook() else: @@ -318,28 +318,28 @@ else: entities = [] eids_from_to = [] - pruned = self.get_pruned_hooks(session, event, + pruned = self.get_pruned_hooks(cnx, event, entities, eids_from_to, kwargs) # by default, hooks are executed with security turned off - with session.security_enabled(read=False): + with cnx.security_enabled(read=False): for _kwargs in _iter_kwargs(entities, eids_from_to, kwargs): - hooks = sorted(self.filtered_possible_objects(pruned, session, **_kwargs), + hooks = sorted(self.filtered_possible_objects(pruned, cnx, **_kwargs), key=lambda x: x.order) debug = server.DEBUG & server.DBG_HOOKS - with session.security_enabled(write=False): + with cnx.security_enabled(write=False): for hook in hooks: if debug: print event, _kwargs, hook hook() - def get_pruned_hooks(self, session, event, entities, eids_from_to, kwargs): + def get_pruned_hooks(self, cnx, event, entities, eids_from_to, kwargs): """return a set of hooks that should not be considered by filtered_possible objects the idea is to make a first pass over all the hooks in the registry and to mark put some of them in a pruned list. The pruned hooks are the one which: - * are disabled at the session level + * are disabled at the connection level * have a selector containing a :class:`match_rtype` or an :class:`is_instance` predicate which does not match the rtype / etype @@ -362,17 +362,17 @@ else: # nothing to prune, how did we get there ??? return set() cache_key = (event, kwargs.get('rtype'), etype) - pruned = session.pruned_hooks_cache.get(cache_key) + pruned = cnx.pruned_hooks_cache.get(cache_key) if pruned is not None: return pruned pruned = set() - session.pruned_hooks_cache[cache_key] = pruned + cnx.pruned_hooks_cache[cache_key] = pruned if look_for_selector is not None: for id, hooks in self.iteritems(): for hook in hooks: enabled_cat, main_filter = hook.filterable_selectors() if enabled_cat is not None: - if not enabled_cat(hook, session): + if not enabled_cat(hook, cnx): pruned.add(hook) continue if main_filter is not None: @@ -381,7 +381,7 @@ main_filter.toetypes is not None): continue first_kwargs = _iter_kwargs(entities, eids_from_to, kwargs).next() - if not main_filter(hook, session, **first_kwargs): + if not main_filter(hook, cnx, **first_kwargs): pruned.add(hook) return pruned @@ -404,12 +404,12 @@ def __init__(self, vreg): self.vreg = vreg - def call_hooks(self, event, session=None, **kwargs): + def call_hooks(self, event, cnx=None, **kwargs): try: registry = self.vreg['%s_hooks' % event] except RegistryNotFound: return # no hooks for this event - registry.call_hooks(event, session, **kwargs) + registry.call_hooks(event, cnx, **kwargs) for event in ALL_HOOKS: @@ -460,10 +460,10 @@ if kwargs.get('rtype') not in self.expected: return 0 if self.frometypes is not None and \ - req.describe(kwargs['eidfrom'])[0] not in self.frometypes: + req.entity_metas(kwargs['eidfrom'])['type'] not in self.frometypes: return 0 if self.toetypes is not None and \ - req.describe(kwargs['eidto'])[0] not in self.toetypes: + req.entity_metas(kwargs['eidto'])['type'] not in self.toetypes: return 0 return 1 @@ -507,7 +507,7 @@ Hooks being appobjects like views, they have a `__regid__` and a `__select__` class attribute. 
Like all appobjects, hooks have the `self._cw` attribute which - represents the current session. In entity hooks, a `self.entity` attribute is + represents the current connection. In entity hooks, a `self.entity` attribute is also present. The `events` tuple is used by the base class selector to dispatch the hook @@ -604,7 +604,7 @@ def __call__(self): assert self.main_rtype for eid in (self.eidfrom, self.eidto): - etype = self._cw.describe(eid)[0] + etype = self._cw.entity_metas(eid)['type'] if self.main_rtype not in self._cw.vreg.schema.eschema(etype).subjrels: return if self.rtype in self.subject_relations: @@ -640,7 +640,7 @@ skip_object_relations = () def __call__(self): - eschema = self._cw.vreg.schema.eschema(self._cw.describe(self.eidfrom)[0]) + eschema = self._cw.vreg.schema.eschema(self._cw.entity_metas(self.eidfrom)['type']) execute = self._cw.execute for rel in self.subject_relations: if rel in eschema.subjrels and not rel in self.skip_subject_relations: @@ -664,7 +664,7 @@ events = ('after_delete_relation',) def __call__(self): - eschema = self._cw.vreg.schema.eschema(self._cw.describe(self.eidfrom)[0]) + eschema = self._cw.vreg.schema.eschema(self._cw.entity_metas(self.eidfrom)['type']) execute = self._cw.execute for rel in self.subject_relations: if rel in eschema.subjrels and not rel in self.skip_subject_relations: @@ -685,7 +685,7 @@ """Base class for operations. Operation may be instantiated in the hooks' `__call__` method. It always - takes a session object as first argument (accessible as `.session` from the + takes a connection object as first argument (accessible as `.cnx` from the operation instance), and optionally all keyword arguments needed by the operation. These keyword arguments will be accessible as attributes from the operation instance. @@ -720,8 +720,8 @@ the transaction is over. All the ORM entities accessed by the earlier transaction are invalid. If you need to work on the database, you need to - start a new transaction, for instance using a new internal session, which - you will need to commit (and close!). + start a new transaction, for instance using a new internal connection, + which you will need to commit. For an operation to support an event, one has to implement the `_event` method with no arguments. @@ -731,24 +731,29 @@ base hook class used). 
""" - def __init__(self, session, **kwargs): - self.session = session + def __init__(self, cnx, **kwargs): + self.cnx = cnx self.__dict__.update(kwargs) - self.register(session) + self.register(cnx) # execution information self.processed = None # 'precommit', 'commit' self.failed = False - def register(self, session): - session.add_operation(self, self.insert_index()) + @property + @deprecated('[3.19] Operation.session is deprecated, use Operation.cnx instead') + def session(self): + return self.cnx + + def register(self, cnx): + cnx.add_operation(self, self.insert_index()) def insert_index(self): - """return the index of the lastest instance which is not a + """return the index of the latest instance which is not a LateOperation instance """ # faster by inspecting operation in reverse order for heavy transactions i = None - for i, op in enumerate(reversed(self.session.pending_operations)): + for i, op in enumerate(reversed(self.cnx.pending_operations)): if isinstance(op, (LateOperation, SingleLastOperation)): continue return -i or None @@ -849,12 +854,12 @@ return ('cw.dataops', cls.__name__) @classmethod - def get_instance(cls, session, **kwargs): + def get_instance(cls, cnx, **kwargs): # no need to lock: transaction_data already comes from thread's local storage try: - return session.transaction_data[cls.data_key] + return cnx.transaction_data[cls.data_key] except KeyError: - op = session.transaction_data[cls.data_key] = cls(session, **kwargs) + op = cnx.transaction_data[cls.data_key] = cls(cnx, **kwargs) return op def __init__(self, *args, **kwargs): @@ -892,14 +897,14 @@ Iterating over operation data closed it and should be reserved to precommit / postcommit method of the operation.""" self._processed = True - op = self.session.transaction_data.pop(self.data_key) + op = self.cnx.transaction_data.pop(self.data_key) assert op is self, "Bad handling of operation data, found %s instead of %s for key %s" % ( op, self, self.data_key) return self._container -@deprecated('[3.10] use opcls.get_instance(session, **opkwargs).add_data(value)') -def set_operation(session, datakey, value, opcls, containercls=set, **opkwargs): +@deprecated('[3.10] use opcls.get_instance(cnx, **opkwargs).add_data(value)') +def set_operation(cnx, datakey, value, opcls, containercls=set, **opkwargs): """Function to ease applying a single operation on a set of data, avoiding to create as many as operation as they are individual modification. You should try to use this instead of creating on operation for each `value`, @@ -907,10 +912,10 @@ Arguments are: - * the `session` object + * `cnx`, the current connection * `datakey`, a specially forged key that will be used as key in - session.transaction_data + cnx.transaction_data * `value` that is the actual payload of an individual operation @@ -940,15 +945,15 @@ get unexpected data loss in some case of nested hooks. 
""" try: - # Search for session.transaction_data[`datakey`] (expected to be a set): + # Search for cnx.transaction_data[`datakey`] (expected to be a set): # if found, simply append `value` - _container_add(session.transaction_data[datakey], value) + _container_add(cnx.transaction_data[datakey], value) except KeyError: # else, initialize it to containercls([`value`]) and instantiate the given # `opcls` operation class with additional keyword arguments - opcls(session, **opkwargs) - session.transaction_data[datakey] = containercls() - _container_add(session.transaction_data[datakey], value) + opcls(cnx, **opkwargs) + cnx.transaction_data[datakey] = containercls() + _container_add(cnx.transaction_data[datakey], value) class LateOperation(Operation): @@ -961,7 +966,7 @@ """ # faster by inspecting operation in reverse order for heavy transactions i = None - for i, op in enumerate(reversed(self.session.pending_operations)): + for i, op in enumerate(reversed(self.cnx.pending_operations)): if isinstance(op, SingleLastOperation): continue return -i or None @@ -976,17 +981,17 @@ operations """ - def register(self, session): + def register(self, cnx): """override register to handle cases where this operation has already been added """ - operations = session.pending_operations + operations = cnx.pending_operations index = self.equivalent_index(operations) if index is not None: equivalent = operations.pop(index) else: equivalent = None - session.add_operation(self, self.insert_index()) + cnx.add_operation(self, self.insert_index()) return equivalent def equivalent_index(self, operations): @@ -1001,7 +1006,7 @@ class SendMailOp(SingleLastOperation): - def __init__(self, session, msg=None, recipients=None, **kwargs): + def __init__(self, cnx, msg=None, recipients=None, **kwargs): # may not specify msg yet, as # `cubicweb.sobjects.supervision.SupervisionMailOp` if msg is not None: @@ -1010,18 +1015,18 @@ else: assert recipients is None self.to_send = [] - super(SendMailOp, self).__init__(session, **kwargs) + super(SendMailOp, self).__init__(cnx, **kwargs) - def register(self, session): - previous = super(SendMailOp, self).register(session) + def register(self, cnx): + previous = super(SendMailOp, self).register(cnx) if previous: self.to_send = previous.to_send + self.to_send def postcommit_event(self): - self.session.repo.threaded_task(self.sendmails) + self.cnx.repo.threaded_task(self.sendmails) def sendmails(self): - self.session.vreg.config.sendmails(self.to_send) + self.cnx.vreg.config.sendmails(self.to_send) class RQLPrecommitOperation(Operation): @@ -1029,7 +1034,7 @@ rqls = None def precommit_event(self): - execute = self.session.execute + execute = self.cnx.execute for rql in self.rqls: execute(*rql) @@ -1051,7 +1056,7 @@ remove inserted eid from repository type/source cache """ try: - self.session.repo.clear_caches(self.get_data()) + self.cnx.repo.clear_caches(self.get_data()) except KeyError: pass @@ -1066,7 +1071,7 @@ """ try: eids = self.get_data() - self.session.repo.clear_caches(eids) - self.session.repo.app_instances_bus.publish(['delete'] + list(str(eid) for eid in eids)) + self.cnx.repo.clear_caches(eids) + self.cnx.repo.app_instances_bus.publish(['delete'] + list(str(eid) for eid in eids)) except KeyError: pass diff -r 84738d495ffd -r 793377697c81 server/ldaputils.py --- a/server/ldaputils.py Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,360 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""cubicweb utilities for ldap sources

-Part of the code is coming from Zope's LDAPUserFolder

-Copyright (c) 2004 Jens Vagelpohl.
-All Rights Reserved.

-This software is subject to the provisions of the Zope Public License,
-Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
-THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
-WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
-FOR A PARTICULAR PURPOSE.
-"""

-from __future__ import division # XXX why?

-from datetime import datetime

-import ldap
-from ldap.ldapobject import ReconnectLDAPObject
-from ldap.filter import filter_format
-from ldapurl import LDAPUrl

-from cubicweb import ValidationError, AuthenticationError, Binary
-from cubicweb.server import utils
-from cubicweb.server.sources import ConnectionWrapper

-_ = unicode

-# search scopes
-BASE = ldap.SCOPE_BASE
-ONELEVEL = ldap.SCOPE_ONELEVEL
-SUBTREE = ldap.SCOPE_SUBTREE

-# map ldap protocols to their standard port
-PROTO_PORT = {'ldap': 389,
-              'ldaps': 636,
-              'ldapi': None,
-              }


-class LDAPSourceMixIn(object):
-    """a mix-in for LDAP based sources"""
-    options = (
-        ('auth-mode',
-         {'type' : 'choice',
-          'default': 'simple',
-          'choices': ('simple', 'cram_md5', 'digest_md5', 'gssapi'),
-          'help': 'authentication mode used to authenticate user to the ldap.',
-          'group': 'ldap-source', 'level': 3,
-          }),
-        ('auth-realm',
-         {'type' : 'string',
-          'default': None,
-          'help': 'realm to use when using gssapi/kerberos authentication.',
-          'group': 'ldap-source', 'level': 3,
-          }),

-        ('data-cnx-dn',
-         {'type' : 'string',
-          'default': '',
-          'help': 'user dn to use to open data connection to the ldap (eg used \
-to respond to rql queries). Leave empty for anonymous bind',
-          'group': 'ldap-source', 'level': 1,
-          }),
-        ('data-cnx-password',
-         {'type' : 'string',
-          'default': '',
-          'help': 'password to use to open data connection to the ldap (eg used to respond to rql queries).
Leave empty for anonymous bind.', - 'group': 'ldap-source', 'level': 1, - }), - - ('user-base-dn', - {'type' : 'string', - 'default': '', - 'help': 'base DN to lookup for users; disable user importation mechanism if unset', - 'group': 'ldap-source', 'level': 1, - }), - ('user-scope', - {'type' : 'choice', - 'default': 'ONELEVEL', - 'choices': ('BASE', 'ONELEVEL', 'SUBTREE'), - 'help': 'user search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")', - 'group': 'ldap-source', 'level': 1, - }), - ('user-classes', - {'type' : 'csv', - 'default': ('top', 'posixAccount'), - 'help': 'classes of user (with Active Directory, you want to say "user" here)', - 'group': 'ldap-source', 'level': 1, - }), - ('user-filter', - {'type': 'string', - 'default': '', - 'help': 'additional filters to be set in the ldap query to find valid users', - 'group': 'ldap-source', 'level': 2, - }), - ('user-login-attr', - {'type' : 'string', - 'default': 'uid', - 'help': 'attribute used as login on authentication (with Active Directory, you want to use "sAMAccountName" here)', - 'group': 'ldap-source', 'level': 1, - }), - ('user-default-group', - {'type' : 'csv', - 'default': ('users',), - 'help': 'name of a group in which ldap users will be by default. \ -You can set multiple groups by separating them by a comma.', - 'group': 'ldap-source', 'level': 1, - }), - ('user-attrs-map', - {'type' : 'named', - 'default': {'uid': 'login', 'gecos': 'email', 'userPassword': 'upassword'}, - 'help': 'map from ldap user attributes to cubicweb attributes (with Active Directory, you want to use sAMAccountName:login,mail:email,givenName:firstname,sn:surname)', - 'group': 'ldap-source', 'level': 1, - }), - - ) - - _conn = None - - def _entity_update(self, source_entity): - super(LDAPSourceMixIn, self)._entity_update(source_entity) - if self.urls: - if len(self.urls) > 1: - raise ValidationError(source_entity.eid, {'url': _('can only have one url')}) - try: - protocol, hostport = self.urls[0].split('://') - except ValueError: - raise ValidationError(source_entity.eid, {'url': _('badly formatted url')}) - if protocol not in PROTO_PORT: - raise ValidationError(source_entity.eid, {'url': _('unsupported protocol')}) - - def update_config(self, source_entity, typedconfig): - """update configuration from source entity. 
`typedconfig` is config - properly typed with defaults set - """ - super(LDAPSourceMixIn, self).update_config(source_entity, typedconfig) - self.authmode = typedconfig['auth-mode'] - self._authenticate = getattr(self, '_auth_%s' % self.authmode) - self.cnx_dn = typedconfig['data-cnx-dn'] - self.cnx_pwd = typedconfig['data-cnx-password'] - self.user_base_dn = str(typedconfig['user-base-dn']) - self.user_base_scope = globals()[typedconfig['user-scope']] - self.user_login_attr = typedconfig['user-login-attr'] - self.user_default_groups = typedconfig['user-default-group'] - self.user_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'} - self.user_attrs.update(typedconfig['user-attrs-map']) - self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.iteritems()) - self.base_filters = [filter_format('(%s=%s)', ('objectClass', o)) - for o in typedconfig['user-classes']] - if typedconfig['user-filter']: - self.base_filters.append(typedconfig['user-filter']) - self._conn = None - - def connection_info(self): - assert len(self.urls) == 1, self.urls - protocol, hostport = self.urls[0].split('://') - if protocol != 'ldapi' and not ':' in hostport: - hostport = '%s:%s' % (hostport, PROTO_PORT[protocol]) - return protocol, hostport - - def get_connection(self): - """open and return a connection to the source""" - if self._conn is None: - try: - self._connect() - except Exception: - self.exception('unable to connect to ldap') - return ConnectionWrapper(self._conn) - - def authenticate(self, session, login, password=None, **kwargs): - """return CWUser eid for the given login/password if this account is - defined in this source, else raise `AuthenticationError` - - two queries are needed since passwords are stored crypted, so we have - to fetch the salt first - """ - self.info('ldap authenticate %s', login) - if not password: - # On Windows + ADAM this would have succeeded (!!!) - # You get Authenticated as: 'NT AUTHORITY\ANONYMOUS LOGON'. 
- # we really really don't want that - raise AuthenticationError() - searchfilter = [filter_format('(%s=%s)', (self.user_login_attr, login))] - searchfilter.extend(self.base_filters) - searchstr = '(&%s)' % ''.join(searchfilter) - # first search the user - try: - user = self._search(session, self.user_base_dn, - self.user_base_scope, searchstr)[0] - except (IndexError, ldap.SERVER_DOWN): - # no such user - raise AuthenticationError() - # check password by establishing a (unused) connection - try: - self._connect(user, password) - except ldap.LDAPError as ex: - # Something went wrong, most likely bad credentials - self.info('while trying to authenticate %s: %s', user, ex) - raise AuthenticationError() - except Exception: - self.error('while trying to authenticate %s', user, exc_info=True) - raise AuthenticationError() - eid = self.repo.extid2eid(self, user['dn'], 'CWUser', session, {}) - if eid < 0: - # user has been moved away from this source - raise AuthenticationError() - return eid - - def _connect(self, user=None, userpwd=None): - protocol, hostport = self.connection_info() - self.info('connecting %s://%s as %s', protocol, hostport, - user and user['dn'] or 'anonymous') - # don't require server certificate when using ldaps (will - # enable self signed certs) - ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER) - url = LDAPUrl(urlscheme=protocol, hostport=hostport) - conn = ReconnectLDAPObject(url.initializeUrl()) - # Set the protocol version - version 3 is preferred - try: - conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION3) - except ldap.LDAPError: # Invalid protocol version, fall back safely - conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION2) - # Deny auto-chasing of referrals to be safe, we handle them instead - # Required for AD - try: - conn.set_option(ldap.OPT_REFERRALS, 0) - except ldap.LDAPError: # Cannot set referrals, so do nothing - pass - #conn.set_option(ldap.OPT_NETWORK_TIMEOUT, conn_timeout) - #conn.timeout = op_timeout - # Now bind with the credentials given. Let exceptions propagate out. 
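-        # illustration, not in the original file: the bind-to-verify pattern
-        # used below can be reproduced standalone with python-ldap (url, dn
-        # and password are made up; a bad password raises INVALID_CREDENTIALS):
-        #     import ldap
-        #     cnx = ldap.initialize('ldap://ldap.example.org:389')
-        #     cnx.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION3)
-        #     try:
-        #         cnx.simple_bind_s('uid=jdoe,ou=people,dc=example,dc=org', 'secret')
-        #     except ldap.INVALID_CREDENTIALS:
-        #         print 'bad credentials'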
- if user is None: - # no user specified, we want to initialize the 'data' connection, - assert self._conn is None - self._conn = conn - # XXX always use simple bind for data connection - if not self.cnx_dn: - conn.simple_bind_s(self.cnx_dn, self.cnx_pwd) - else: - self._authenticate(conn, {'dn': self.cnx_dn}, self.cnx_pwd) - else: - # user specified, we want to check user/password, no need to return - # the connection which will be thrown out - self._authenticate(conn, user, userpwd) - return conn - - def _auth_simple(self, conn, user, userpwd): - conn.simple_bind_s(user['dn'], userpwd) - - def _auth_cram_md5(self, conn, user, userpwd): - from ldap import sasl - auth_token = sasl.cram_md5(user['dn'], userpwd) - conn.sasl_interactive_bind_s('', auth_token) - - def _auth_digest_md5(self, conn, user, userpwd): - from ldap import sasl - auth_token = sasl.digest_md5(user['dn'], userpwd) - conn.sasl_interactive_bind_s('', auth_token) - - def _auth_gssapi(self, conn, user, userpwd): - # print XXX not proper sasl/gssapi - import kerberos - if not kerberos.checkPassword(user[self.user_login_attr], userpwd): - raise Exception('BAD login / mdp') - #from ldap import sasl - #conn.sasl_interactive_bind_s('', sasl.gssapi()) - - def _search(self, session, base, scope, - searchstr='(objectClass=*)', attrs=()): - """make an ldap query""" - self.debug('ldap search %s %s %s %s %s', self.uri, base, scope, - searchstr, list(attrs)) - # XXX for now, we do not have connections set support for LDAP, so - # this is always self._conn - cnx = self.get_connection().cnx #session.cnxset.connection(self.uri).cnx - if cnx is None: - # cant connect to server - msg = session._("can't connect to source %s, some data may be missing") - session.set_shared_data('sources_error', msg % self.uri, txdata=True) - return [] - try: - res = cnx.search_s(base, scope, searchstr, attrs) - except ldap.PARTIAL_RESULTS: - res = cnx.result(all=0)[1] - except ldap.NO_SUCH_OBJECT: - self.info('ldap NO SUCH OBJECT %s %s %s', base, scope, searchstr) - self._process_no_such_object(session, base) - return [] - # except ldap.REFERRAL as e: - # cnx = self.handle_referral(e) - # try: - # res = cnx.search_s(base, scope, searchstr, attrs) - # except ldap.PARTIAL_RESULTS: - # res_type, res = cnx.result(all=0) - result = [] - for rec_dn, rec_dict in res: - # When used against Active Directory, "rec_dict" may not be - # be a dictionary in some cases (instead, it can be a list) - # - # An example of a useless "res" entry that can be ignored - # from AD is - # (None, ['ldap://ForestDnsZones.PORTAL.LOCAL/DC=ForestDnsZones,DC=PORTAL,DC=LOCAL']) - # This appears to be some sort of internal referral, but - # we can't handle it, so we need to skip over it. 
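-            # illustration, not in the original file: given a result such as
-            #     res = [('uid=jdoe,ou=people,dc=example,dc=org',
-            #             {'uid': ['jdoe']}),
-            #            (None, ['ldap://ForestDnsZones.PORTAL.LOCAL/DC=ForestDnsZones,DC=PORTAL,DC=LOCAL'])]
-            # only the first tuple carries a dict payload; the try/except
-            # below keeps it and silently drops the referral entry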
-            try:
-                items = rec_dict.iteritems()
-            except AttributeError:
-                continue
-            else:
-                itemdict = self._process_ldap_item(rec_dn, items)
-                result.append(itemdict)
-        self.debug('ldap built results %s', len(result))
-        return result

-    def _process_ldap_item(self, dn, iterator):
-        """Turn an ldap received item into a proper dict."""
-        itemdict = {'dn': dn}
-        for key, value in iterator:
-            if self.user_attrs.get(key) == 'upassword': # XXX better password detection
-                value = value[0].encode('utf-8')
-                # we only support ldap_salted_sha1 for ldap sources, see: server/utils.py
-                if not value.startswith('{SSHA}'):
-                    value = utils.crypt_password(value)
-                itemdict[key] = Binary(value)
-            elif self.user_attrs.get(key) == 'modification_date':
-                itemdict[key] = datetime.strptime(value[0], '%Y%m%d%H%M%SZ')
-            else:
-                value = [unicode(val, 'utf-8', 'replace') for val in value]
-                if len(value) == 1:
-                    itemdict[key] = value = value[0]
-                else:
-                    itemdict[key] = value
-        return itemdict

-    def _process_no_such_object(self, session, dn):
-        """Some searches return a NO_SUCH_OBJECT error; handle this (usually
-        because an object whose dn no longer exists in the ldap directory has
-        been encountered).

-        Do nothing by default, let sub-classes handle that.
-        """
diff -r 84738d495ffd -r 793377697c81 server/migractions.py
--- a/server/migractions.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/server/migractions.py	Wed Sep 24 18:04:30 2014 +0200
@@ -53,15 +53,11 @@
                               PURE_VIRTUAL_RTYPES,
                               CubicWebRelationSchema, order_eschemas)
 from cubicweb.cwvreg import CW_EVENT_MANAGER
-from cubicweb.dbapi import get_repository, _repo_connect
+from cubicweb import repoapi
 from cubicweb.migration import MigrationHelper, yes
-from cubicweb.server import hook
-try:
-    from cubicweb.server import SOURCE_TYPES, schemaserial as ss
-    from cubicweb.server.utils import manager_userpasswd
-    from cubicweb.server.sqlutils import sqlexec, SQL_PREFIX
-except ImportError: # LAX
-    pass
+from cubicweb.server import hook, schemaserial as ss
+from cubicweb.server.utils import manager_userpasswd
+from cubicweb.server.sqlutils import sqlexec, SQL_PREFIX


 def mock_object(**params):
@@ -82,6 +78,7 @@
     if not cls.__regid__ in repo.vreg['after_add_entity_hooks']:
         repo.vreg.register(ClearGroupMap)

+
 class ServerMigrationHelper(MigrationHelper):
     """specific migration helper for server side migration scripts,
     providing actions related to schema/data migration
@@ -95,10 +92,14 @@
         assert repo
         if cnx is not None:
             assert repo
-            self._cnx = cnx
+            self.cnx = cnx
             self.repo = repo
+            self.session = cnx._session
         elif connect:
             self.repo_connect()
+            self.set_session()
+        else:
+            self.session = None
         # no config on shell to a remote instance
         if config is not None and (cnx or connect):
             repo = self.repo
@@ -124,11 +125,37 @@
         self.fs_schema = schema
         self._synchronized = set()

+    def set_session(self):
+        try:
+            login = self.repo.config.default_admin_config['login']
+            pwd = self.repo.config.default_admin_config['password']
+        except KeyError:
+            login, pwd = manager_userpasswd()
+        while True:
+            try:
+                self.cnx = repoapi.connect(self.repo, login, password=pwd)
+                if not 'managers' in self.cnx.user.groups:
+                    print 'migration needs an account in the managers group'
+                else:
+                    break
+            except AuthenticationError:
+                print 'wrong user/password'
+            except (KeyboardInterrupt, EOFError):
+                print 'aborting...'
+                sys.exit(0)
+            try:
+                login, pwd = manager_userpasswd()
+            except (KeyboardInterrupt, EOFError):
+                print 'aborting...'
+ sys.exit(0) + self.session = self.repo._get_session(self.cnx.sessionid) + self.session.keep_cnxset_mode('transaction') + # overriden from base MigrationHelper ###################################### @cached def repo_connect(self): - self.repo = get_repository(config=self.config) + self.repo = repoapi.get_repository(config=self.config) return self.repo def cube_upgraded(self, cube, version): @@ -147,18 +174,19 @@ elif options.backup_db: self.backup_database(askconfirm=False) # disable notification during migration - with self.session.allow_all_hooks_but('notification'): + with self.cnx.allow_all_hooks_but('notification'): super(ServerMigrationHelper, self).migrate(vcconf, toupgrade, options) def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs): - try: - return super(ServerMigrationHelper, self).cmd_process_script( - migrscript, funcname, *args, **kwargs) - except ExecutionError as err: - sys.stderr.write("-> %s\n" % err) - except BaseException: - self.rollback() - raise + with self.cnx._cnx.ensure_cnx_set: + try: + return super(ServerMigrationHelper, self).cmd_process_script( + migrscript, funcname, *args, **kwargs) + except ExecutionError as err: + sys.stderr.write("-> %s\n" % err) + except BaseException: + self.rollback() + raise # Adjust docstring cmd_process_script.__doc__ = MigrationHelper.cmd_process_script.__doc__ @@ -186,18 +214,18 @@ open(backupfile,'w').close() # kinda lock os.chmod(backupfile, 0600) # backup + source = repo.system_source tmpdir = tempfile.mkdtemp() try: failed = False - for source in repo.sources: - try: - source.backup(osp.join(tmpdir, source.uri), self.confirm, format=format) - except Exception as ex: - print '-> error trying to backup %s [%s]' % (source.uri, ex) - if not self.confirm('Continue anyway?', default='n'): - raise SystemExit(1) - else: - failed = True + try: + source.backup(osp.join(tmpdir, source.uri), self.confirm, format=format) + except Exception as ex: + print '-> error trying to backup %s [%s]' % (source.uri, ex) + if not self.confirm('Continue anyway?', default='n'): + raise SystemExit(1) + else: + failed = True with open(osp.join(tmpdir, 'format.txt'), 'w') as format_file: format_file.write('%s\n' % format) with open(osp.join(tmpdir, 'versions.txt'), 'w') as version_file: @@ -216,8 +244,7 @@ finally: shutil.rmtree(tmpdir) - def restore_database(self, backupfile, drop=True, systemonly=True, - askconfirm=True, format='native'): + def restore_database(self, backupfile, drop=True, askconfirm=True, format='native'): # check if not osp.exists(backupfile): raise ExecutionError("Backup file %s doesn't exist" % backupfile) @@ -246,76 +273,26 @@ format = written_format self.config.init_cnxset_pool = False repo = self.repo_connect() - for source in repo.sources: - if systemonly and source.uri != 'system': - continue - try: - source.restore(osp.join(tmpdir, source.uri), self.confirm, drop, format) - except Exception as exc: - print '-> error trying to restore %s [%s]' % (source.uri, exc) - if not self.confirm('Continue anyway?', default='n'): - raise SystemExit(1) + source = repo.system_source + try: + source.restore(osp.join(tmpdir, source.uri), self.confirm, drop, format) + except Exception as exc: + print '-> error trying to restore %s [%s]' % (source.uri, exc) + if not self.confirm('Continue anyway?', default='n'): + raise SystemExit(1) shutil.rmtree(tmpdir) # call hooks repo.init_cnxset_pool() repo.hm.call_hooks('server_restore', repo=repo, timestamp=backupfile) print '-> database restored.' 
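For reference (not part of the changeset): migration code written against the
old dbapi can follow the repoapi pattern used above. A minimal sketch, assuming
`config`, `login` and `pwd` are at hand and that the returned connection object
is usable as a context manager::

    from cubicweb import repoapi

    repo = repoapi.get_repository(config=config)
    cnx = repoapi.connect(repo, login, password=pwd)
    with cnx:
        rset = cnx.execute('Any X WHERE X is CWUser')
        cnx.commit()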
- @property - def cnx(self): - """lazy connection""" - try: - return self._cnx - except AttributeError: - sourcescfg = self.repo.config.sources() - try: - login = sourcescfg['admin']['login'] - pwd = sourcescfg['admin']['password'] - except KeyError: - login, pwd = manager_userpasswd() - while True: - try: - self._cnx = _repo_connect(self.repo, login, password=pwd) - if not 'managers' in self._cnx.user(self.session).groups: - print 'migration need an account in the managers group' - else: - break - except AuthenticationError: - print 'wrong user/password' - except (KeyboardInterrupt, EOFError): - print 'aborting...' - sys.exit(0) - try: - login, pwd = manager_userpasswd() - except (KeyboardInterrupt, EOFError): - print 'aborting...' - sys.exit(0) - self.session.keep_cnxset_mode('transaction') - return self._cnx - - @property - def session(self): - if self.config is not None: - session = self.repo._get_session(self.cnx.sessionid) - if session.cnxset is None: - session.read_security = False - session.write_security = False - session.set_cnxset() - return session - # no access to session on remote instance - return None - def commit(self): - if hasattr(self, '_cnx'): - self._cnx.commit() - if self.session: - self.session.set_cnxset() + if hasattr(self, 'cnx'): + self.cnx.commit(free_cnxset=False) def rollback(self): - if hasattr(self, '_cnx'): - self._cnx.rollback() - if self.session: - self.session.set_cnxset() + if hasattr(self, 'cnx'): + self.cnx.rollback(free_cnxset=False) def rqlexecall(self, rqliter, ask_confirm=False): for rql, kwargs in rqliter: @@ -333,7 +310,7 @@ 'schema': self.repo.get_schema(), 'cnx': self.cnx, 'fsschema': self.fs_schema, - 'session' : self.session, + 'session' : self.cnx._cnx, 'repo' : self.repo, }) return context @@ -341,12 +318,12 @@ @cached def group_mapping(self): """cached group mapping""" - return ss.group_mapping(self._cw) + return ss.group_mapping(self.cnx) @cached def cstrtype_mapping(self): """cached constraint types mapping""" - return ss.cstrtype_mapping(self._cw) + return ss.cstrtype_mapping(self.cnx) def cmd_exec_event_script(self, event, cube=None, funcname=None, *args, **kwargs): @@ -371,7 +348,7 @@ self.execscript_confirm = yes try: if event == 'postcreate': - with self.session.allow_all_hooks_but(): + with self.cnx.allow_all_hooks_but(): return self.cmd_process_script(apc, funcname, *args, **kwargs) return self.cmd_process_script(apc, funcname, *args, **kwargs) finally: @@ -393,7 +370,7 @@ sql_scripts = glob(osp.join(directory, '*.%s.sql' % driver)) for fpath in sql_scripts: print '-> installing', fpath - failed = sqlexec(open(fpath).read(), self.session.system_sql, False, + failed = sqlexec(open(fpath).read(), self.cnx.system_sql, False, delimiter=';;') if failed: print '-> ERROR, skipping', fpath @@ -562,7 +539,7 @@ repo = {} for cols in eschema._unique_together or (): fs[unique_index_name(repoeschema, cols)] = sorted(cols) - schemaentity = self.session.entity_from_eid(repoeschema.eid) + schemaentity = self.cnx.entity_from_eid(repoeschema.eid) for entity in schemaentity.related('constraint_of', 'object', targettypes=('CWUniqueTogetherConstraint',)).entities(): repo[entity.name] = sorted(rel.name for rel in entity.relations) @@ -630,21 +607,8 @@ # out of sync with newconstraints when multiple # constraints of the same type are used for cstr in oldconstraints: - for newcstr in newconstraints: - if newcstr.type() == cstr.type(): - break - else: - newcstr = None - if newcstr is None: - self.rqlexec('DELETE X constrained_by C WHERE C eid %(x)s', - 
{'x': cstr.eid}, ask_confirm=confirm) - else: - newconstraints.remove(newcstr) - value = unicode(newcstr.serialize()) - if value != unicode(cstr.serialize()): - self.rqlexec('SET X value %(v)s WHERE X eid %(x)s', - {'x': cstr.eid, 'v': value}, - ask_confirm=confirm) + self.rqlexec('DELETE CWConstraint C WHERE C eid %(x)s', + {'x': cstr.eid}, ask_confirm=confirm) # 2. add new constraints cstrtype_map = self.cstrtype_mapping() self.rqlexecall(ss.constraints2rql(cstrtype_map, newconstraints, @@ -719,7 +683,7 @@ str(totype)) # execute post-create files for cube in reversed(newcubes): - with self.session.allow_all_hooks_but(): + with self.cnx.allow_all_hooks_but(): self.cmd_exec_event_script('postcreate', cube) self.commit() @@ -821,7 +785,7 @@ groupmap = self.group_mapping() cstrtypemap = self.cstrtype_mapping() # register the entity into CWEType - execute = self._cw.execute + execute = self.cnx.execute ss.execschemarql(execute, eschema, ss.eschema2rql(eschema, groupmap)) # add specializes relation if needed specialized = eschema.specializes() @@ -1001,8 +965,8 @@ # repository caches are properly cleanup hook.CleanupDeletedEidsCacheOp.get_instance(session).union(thispending) # and don't forget to remove record from system tables - entities = [session.entity_from_eid(eid, rdeftype) for eid in thispending] - self.repo.system_source.delete_info_multi(session, entities, 'system') + entities = [self.cnx.entity_from_eid(eid, rdeftype) for eid in thispending] + self.repo.system_source.delete_info_multi(self.cnx._cnx, entities) self.sqlexec('DELETE FROM cw_%s WHERE cw_from_entity=%%(eid)s OR ' 'cw_to_entity=%%(eid)s' % rdeftype, {'eid': oldeid}, ask_confirm=False) @@ -1050,7 +1014,7 @@ """ reposchema = self.repo.schema rschema = self.fs_schema.rschema(rtype) - execute = self._cw.execute + execute = self.cnx.execute if rtype in reposchema: print 'warning: relation type %s is already known, skip addition' % ( rtype) @@ -1129,7 +1093,7 @@ subjtype, rtype, objtype) return rdef = self._get_rdef(rschema, subjtype, objtype) - ss.execschemarql(self._cw.execute, rdef, + ss.execschemarql(self.cnx.execute, rdef, ss.rdef2rql(rdef, self.cstrtype_mapping(), self.group_mapping())) if commit: @@ -1356,14 +1320,6 @@ # other data migration commands ########################################### - @property - def _cw(self): - session = self.session - if session is not None: - session.set_cnxset() - return session - return self.cnx.request() - def cmd_storage_changed(self, etype, attribute): """migrate entities to a custom storage. The new storage is expected to be set, it will be temporarily removed for the migration. @@ -1387,22 +1343,28 @@ def cmd_create_entity(self, etype, commit=False, **kwargs): """add a new entity of the given type""" - entity = self._cw.create_entity(etype, **kwargs) + entity = self.cnx.create_entity(etype, **kwargs) if commit: self.commit() return entity + def cmd_find(self, etype, **kwargs): + """find entities of the given type and attribute values""" + return self.cnx.find(etype, **kwargs) + + @deprecated("[3.19] use find(*args, **kwargs).entities() instead") def cmd_find_entities(self, etype, **kwargs): """find entities of the given type and attribute values""" - return self._cw.find_entities(etype, **kwargs) + return self.cnx.find(etype, **kwargs).entities() + @deprecated("[3.19] use find(*args, **kwargs).one() instead") def cmd_find_one_entity(self, etype, **kwargs): """find one entity of the given type and attribute values. 
raise :exc:`cubicweb.req.FindEntityError` if it cannot return one and only
        one entity.
        """
-        return self._cw.find_one_entity(etype, **kwargs)
+        return self.cnx.find(etype, **kwargs).one()

     def cmd_update_etype_fti_weight(self, etype, weight):
         if self.repo.system_source.dbdriver == 'postgres':
@@ -1416,7 +1378,7 @@
           indexable entity types
         """
         from cubicweb.server.checkintegrity import reindex_entities
-        reindex_entities(self.repo.schema, self.session, etypes=etypes)
+        reindex_entities(self.repo.schema, self.cnx._cnx, etypes=etypes)

     @contextmanager
     def cmd_dropped_constraints(self, etype, attrname, cstrtype=None,
@@ -1461,7 +1423,7 @@
         """
         if not ask_confirm or self.confirm('Execute sql: %s ?' % sql):
             try:
-                cu = self.session.system_sql(sql, args)
+                cu = self.cnx.system_sql(sql, args)
             except Exception:
                 ex = sys.exc_info()[1]
                 if self.confirm('Error: %s\nabort?' % ex, pdb=True):
@@ -1479,7 +1441,7 @@
         if not isinstance(rql, (tuple, list)):
             rql = ( (rql, kwargs), )
         res = None
-        execute = self._cw.execute
+        execute = self.cnx.execute
         for rql, kwargs in rql:
             if kwargs:
                 msg = '%s (%s)' % (rql, kwargs)
@@ -1515,7 +1477,7 @@
             self.sqlexec(sql, ask_confirm=False)
         dbhelper = self.repo.system_source.dbhelper
         sqltype = dbhelper.TYPE_MAPPING[newtype]
-        cursor = self.session.cnxset[self.repo.system_source.uri]
+        cursor = self.cnx._cnx.cnxset.cu
         dbhelper.change_col_type(cursor, 'cw_%s' % etype, 'cw_%s' % attr, sqltype, allownull)
         if commit:
             self.commit()
diff -r 84738d495ffd -r 793377697c81 server/msplanner.py
--- a/server/msplanner.py	Wed Sep 24 17:35:59 2014 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1821 +0,0 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""plan execution of rql queries on multiple sources

-the best way to understand what we are trying to achieve here is to read the
-unit-tests in unittest_msplanner.py


-What you need to know
-~~~~~~~~~~~~~~~~~~~~~
-1. The system source is expected to support every entity and relation type

-2. Given "X relation Y":

-   * if the relation and the X and Y types are supported by the external
-     source, we suppose by default that X and Y should both come from the same
-     source as the relation. You can specify otherwise by adding the relation
-     to the "cross_relations" set in the source's mapping file and in that
-     case, we'll consider that we can also find in the system source some
-     relation between X and Y coming from different sources.

-   * if "relation" isn't supported by the external source but X or Y
-     types (or both) are, we suppose by default that we can find in the system
-     source some relation where X and/or Y come from the external source.
You
-     can specify otherwise by adding the relation to the "dont_cross_relations"
-     set in the source's mapping file and in that case, we'll consider that we
-     can only find in the system source some relation between X and Y coming
-     from the system source.


-Implementation
-~~~~~~~~~~~~~~
-XXX explain algorithm


-Examples of multi-sources query execution
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-For a system source and an ldap user source (only CWUser and its attributes
-are supported, no group or such):

-:CWUser X:
-1. fetch CWUser X from both sources and return concatenation of results

-:CWUser X WHERE X in_group G, G name 'users':
-* catch 1
-  1. fetch CWUser X from both sources, store concatenation of results into a
-     temporary table
-  2. return the result of TMP X WHERE X in_group G, G name 'users' from the
-     system source
-* catch 2
-  1. return the result of CWUser X WHERE X in_group G, G name 'users' from system
-     source, that's enough (optimization of the sql querier will avoid join on
-     CWUser, so we will directly get local eids)

-:CWUser X,L WHERE X in_group G, X login L, G name 'users':
-1. fetch Any X,L WHERE X is CWUser, X login L from both sources, store
-   concatenation of results into a temporary table
-2. return the result of Any X, L WHERE X is TMP, X login L, X in_group G,
-   G name 'users' from the system source


-:Any X WHERE X owned_by Y:
-* catch 1
-  1. fetch CWUser X from both sources, store concatenation of results into a
-     temporary table
-  2. return the result of Any X WHERE X owned_by Y, Y is TMP from the system
-     source
-* catch 2
-  1. return the result of Any X WHERE X owned_by Y from system source, that's
-     enough (optimization of the sql querier will avoid join on CWUser, so we
-     will directly get local eids)
-"""

-__docformat__ = "restructuredtext en"

-from itertools import imap, ifilterfalse

-from logilab.common.decorators import cached
-from logilab.common.deprecation import deprecated

-from rql import BadRQLQuery
-from rql.stmts import Union, Select
-from rql.nodes import (VariableRef, Comparison, Relation, Constant, Variable,
-                       Not, Exists, SortTerm, Function)

-from cubicweb import server
-from cubicweb.utils import make_uid
-from cubicweb.rqlrewrite import add_types_restriction, cleanup_solutions
-from cubicweb.server.ssplanner import SSPlanner, OneFetchStep
-from cubicweb.server.mssteps import *

-Variable._ms_table_key = lambda x: x.name
-Relation._ms_table_key = lambda x: x.r_type
-# str() Constant.value to ensure generated table name won't be unicode
-Constant._ms_table_key = lambda x: str(x.value)

-Variable._ms_may_be_processed = lambda x, terms, linkedterms: any(
-    t for t in terms if t in linkedterms.get(x, ()))
-Relation._ms_may_be_processed = lambda x, terms, linkedterms: all(
-    getattr(hs, 'variable', hs) in terms for hs in x.get_variable_parts())

-def ms_scope(term):
-    rel = None
-    scope = term.scope
-    if isinstance(term, Variable) and len(term.stinfo['relations']) == 1:
-        rel = iter(term.stinfo['relations']).next().relation()
-    elif isinstance(term, Constant):
-        rel = term.relation()
-    elif isinstance(term, Relation):
-        rel = term
-    if rel is not None and (
-        rel.r_type != 'identity' and rel.scope is scope
-        and isinstance(rel.parent, Exists) and rel.parent.neged(strict=True)):
-        return scope.parent.scope
-    return scope

-def need_intersect(select, getrschema):
-    for rel in select.iget_nodes(Relation):
-        if isinstance(rel.parent, Exists) and rel.parent.neged(strict=True) and not rel.is_types_restriction():
-            rschema = 
getrschema(rel.r_type) - if not rschema.final: - # if one of the relation's variable is ambiguous but not - # invariant, an intersection will be necessary - for vref in rel.get_nodes(VariableRef): - var = vref.variable - if (var.valuable_references() == 1 - and len(var.stinfo['possibletypes']) > 1): - return True - return False - -def neged_relation(rel): - parent = rel.parent - return isinstance(parent, Not) or (isinstance(parent, Exists) and - isinstance(parent.parent, Not)) - -def need_source_access_relation(vargraph): - if not vargraph: - return False - # check vargraph contains some other relation than the identity relation - # test of key nature since it may be a variable name (don't care about that) - # or a 2-uple (var1, var2) associated to the relation to traverse to go from - # var1 to var2 - return any(key for key, val in vargraph.iteritems() - if isinstance(key, tuple) and val != 'identity') - -def need_aggr_step(select, sources, stepdefs=None): - """return True if a temporary table is necessary to store some partial - results to execute the given query - """ - if len(sources) == 1: - # can do everything at once with a single source - return False - if select.orderby or select.groupby or select.has_aggregat: - # if more than one source, we need a temp table to deal with sort / - # groups / aggregat if : - # * the rqlst won't be splitted (in the other case the last query - # using partial temporary table can do sort/groups/aggregat without - # the need for a later AggrStep) - # * the rqlst is splitted in multiple steps and there are more than one - # final step - if stepdefs is None: - return True - has_one_final = False - fstepsolindices = set() - for stepdef in stepdefs: - if stepdef[-1]: - if has_one_final or frozenset(stepdef[2]) != fstepsolindices: - return True - has_one_final = True - else: - fstepsolindices.update(stepdef[2]) - return False - -def select_group_sort(select): # XXX something similar done in rql2sql - # add variables used in groups and sort terms to the selection - # if necessary - if select.groupby: - for vref in select.groupby: - if not vref in select.selection: - select.append_selected(vref.copy(select)) - for sortterm in select.orderby: - for vref in sortterm.iget_nodes(VariableRef): - if not vref in select.get_selected_variables(): - # we can't directly insert sortterm.term because it references - # a variable of the select before the copy. 
- # XXX if constant term are used to define sort, their value - # may necessite a decay - select.append_selected(vref.copy(select)) - if select.groupby and not vref in select.groupby: - select.add_group_var(vref.copy(select)) - -def allequals(solutions): - """return true if all solutions are identical""" - sol = solutions.next() - noconstsol = None - for sol_ in solutions: - if sol_ != sol: - return False - return True - -# XXX move functions below to rql ############################################## - -def is_ancestor(n1, n2): - """return True if n2 is a parent scope of n1""" - p = n1.parent - while p is not None: - if p is n2: - return True - p = p.parent - return False - -def copy_node(newroot, node, subparts=()): - newnode = node.__class__(*node.initargs(newroot)) - for part in subparts: - newnode.append(part) - return newnode - -def used_in_outer_scope(var, scope): - """return true if the variable is used in an outer scope of the given scope - """ - for rel in var.stinfo['relations']: - rscope = ms_scope(rel) - if not rscope is scope and is_ancestor(scope, rscope): - return True - return False - -################################################################################ - -class PartPlanInformation(object): - """regroups necessary information to execute some part of a "global" rql - query ("global" means as received by the querier, which may result in - several internal queries, e.g. parts, due to security insertions). Actually - a PPI is created for each subquery and for each query in a union. - - It exposes as well some methods helping in executing this part on a - multi-sources repository, modifying its internal structure during the - process. - - :attr plan: - the execution plan - :attr rqlst: - the original rql syntax tree handled by this part - - :attr needsplit: - bool telling if the query has to be split into multiple steps for - execution or if it can be executed at once - - :attr temptable: - a SQL temporary table name or None, if necessary to handle aggregate / - sorting for this part of the query - - :attr finaltable: - a SQL table name or None, if results for this part of the query should be - written into a temporary table (usually shared by multiple PPI) - - :attr sourcesterms: - a dictionary {source : {term: set([solution index, ])}} telling for each - source which terms are supported for which solutions. A "term" may be - either a rql Variable, Constant or Relation node. - """ - def __init__(self, plan, rqlst, rqlhelper=None): - self.plan = plan - self.rqlst = rqlst - self.needsplit = False - self.temptable = None - self.finaltable = None - # shortcuts - self._schema = plan.schema - self._session = plan.session - self._repo = self._session.repo - self._solutions = rqlst.solutions - self._solindices = range(len(self._solutions)) - self.system_source = self._repo.system_source - # source : {term: [solution index, ]} - self.sourcesterms = self._sourcesterms = {} - # source : {relation: set(child variable and constant)} - self._crossrelations = {} - # term : set(sources) - self._discarded_sources = {} - # dictionary of variables and constants which are linked to each other - # using a non final relation supported by multiple sources (crossed or - # not). 
- self._linkedterms = {} - # processing - termssources = self._compute_sourcesterms() - self._remove_invalid_sources(termssources) - self._compute_needsplit() - # after initialisation, .sourcesterms contains the same thing as - # ._sourcesterms though during plan construction, ._sourcesterms will - # be modified while .sourcesterms will be kept unmodified - self.sourcesterms = {} - for k, v in self._sourcesterms.iteritems(): - self.sourcesterms[k] = {} - for k2, v2 in v.iteritems(): - self.sourcesterms[k][k2] = v2.copy() - # cleanup linked var - for var, linkedrelsinfo in self._linkedterms.iteritems(): - self._linkedterms[var] = frozenset(x[0] for x in linkedrelsinfo) - # map output of a step to input of a following step - self._inputmaps = {} - # record input map conflicts to resolve them on final step generation - self._conflicts = [] - if rqlhelper is not None: # else test - self._insert_identity_variable = rqlhelper._annotator.rewrite_shared_optional - if server.DEBUG & server.DBG_MS: - print 'sourcesterms:' - self._debug_sourcesterms() - - def _debug_sourcesterms(self): - for source in self._sourcesterms: - print '-', source - for term, sols in self._sourcesterms[source].items(): - print ' -', term, id(term), ':', sols - - def copy_solutions(self, solindices): - return [self._solutions[solidx].copy() for solidx in solindices] - - @property - @cached - def part_sources(self): - if self._sourcesterms: - return tuple(sorted(self._sourcesterms)) - return (self.system_source,) - - @property - @cached - def _sys_source_set(self): - return frozenset((self.system_source, solindex) - for solindex in self._solindices) - - @cached - def _norel_support_set(self, relation): - """return a set of (source, solindex) where source doesn't support the - relation - """ - return frozenset((source, solidx) for source in self._repo.sources - for solidx in self._solindices - if not ((source.support_relation(relation.r_type)) - or relation.r_type in source.dont_cross_relations)) - - def _compute_sourcesterms(self): - """compute for each term (variable, rewritten constant, relation) and - for each solution in the rqlst which sources support them - """ - repo = self._repo - eschema = self._schema.eschema - sourcesterms = self._sourcesterms - # find for each source which variable/solution are supported - for varname, varobj in self.rqlst.defined_vars.items(): - # if variable has an eid specified, we can get its source directly - # NOTE: use uidrel and not constnode to deal with "X eid IN(1,2,3,4)" - if varobj.stinfo['uidrel'] is not None: - rel = varobj.stinfo['uidrel'] - hasrel = len(varobj.stinfo['relations']) > 1 - for const in rel.children[1].get_nodes(Constant): - eid = const.eval(self.plan.args) - source = self._session.source_from_eid(eid) - if (source is self.system_source - or (hasrel and varobj._q_invariant and - not any(source.support_relation(r.r_type) - for r in varobj.stinfo['relations'] - if not r is rel))): - self._set_source_for_term(self.system_source, varobj) - else: - self._set_source_for_term(source, varobj) - continue - rels = varobj.stinfo['relations'] - if not rels and varobj.stinfo['typerel'] is None: - # (rare) case where the variable has no type specified nor - # relation accessed ex. 
"Any MAX(X)" - self._set_source_for_term(self.system_source, varobj) - continue - for i, sol in enumerate(self._solutions): - vartype = sol[varname] - # skip final variable - if eschema(vartype).final: - break - for source in repo.sources: - if source.support_entity(vartype): - # the source support the entity type, though we will - # actually have to fetch from it only if - # * the variable isn't invariant - # * at least one supported relation specified - if not varobj._q_invariant or \ - any(imap(source.support_relation, - (r.r_type for r in rels if r.r_type not in ('identity', 'eid')))): - sourcesterms.setdefault(source, {}).setdefault(varobj, set()).add(i) - # if variable is not invariant and is used by a relation - # not supported by this source, we'll have to split the - # query - if not varobj._q_invariant and any(ifilterfalse( - source.support_relation, (r.r_type for r in rels))): - self.needsplit = True - # add source for rewritten constants to sourcesterms - self._const_vars = {} - for vconsts in self.rqlst.stinfo['rewritten'].itervalues(): - # remember those consts come from the same variable - for const in vconsts: - self._const_vars[const] = vconsts - source = self._session.source_from_eid(const.eval(self.plan.args)) - if source is self.system_source: - for const in vconsts: - self._set_source_for_term(source, const) - elif not self._sourcesterms: - for const in vconsts: - self._set_source_for_term(source, const) - elif source in self._sourcesterms: - source_scopes = frozenset(ms_scope(t) for t in self._sourcesterms[source]) - for const in vconsts: - if ms_scope(const) in source_scopes: - self._set_source_for_term(source, const) - # if system source is used, add every rewritten constant - # to its supported terms even when associated entity - # doesn't actually come from it so we get a changes that - # allequals will return True as expected when computing - # needsplit - # check const is used in a relation restriction - if const.relation() and self.system_source in sourcesterms: - self._set_source_for_term(self.system_source, const) - # add source for relations - rschema = self._schema.rschema - termssources = {} - sourcerels = [] - for rel in self.rqlst.iget_nodes(Relation): - # process non final relations only - # note: don't try to get schema for 'is' relation (not available - # during bootstrap) - if rel.r_type == 'cw_source': - sourcerels.append(rel) - if not (rel.is_types_restriction() or rschema(rel.r_type).final): - # nothing to do if relation is not supported by multiple sources - # or if some source has it listed in its cross_relations - # attribute - # - # XXX code below don't deal if some source allow relation - # crossing but not another one - relsources = [s for s in repo.rel_type_sources(rel.r_type) - if s is self.system_source - or s in self._sourcesterms] - if len(relsources) < 2: - # filter out sources being there because they have this - # relation in their dont_cross_relations attribute - relsources = [source for source in relsources - if source.support_relation(rel.r_type)] - if relsources: - # this means the relation is using a variable inlined as - # a constant and another unsupported variable, in which - # case we put the relation in sourcesterms - self._sourcesterms.setdefault(relsources[0], {})[rel] = set(self._solindices) - continue - lhs, rhs = rel.get_variable_parts() - lhsv, rhsv = getattr(lhs, 'variable', lhs), getattr(rhs, 'variable', rhs) - # update dictionary of sources supporting lhs and rhs vars - if not lhsv in termssources: - 
termssources[lhsv] = self._term_sources(lhs) - if not rhsv in termssources: - termssources[rhsv] = self._term_sources(rhs) - self._handle_cross_relation(rel, relsources, termssources) - self._linkedterms.setdefault(lhsv, set()).add((rhsv, rel)) - self._linkedterms.setdefault(rhsv, set()).add((lhsv, rel)) - # extract information from cw_source relation - for srel in sourcerels: - vref = srel.children[1].children[0] - sourceeids, sourcenames = [], [] - if isinstance(vref, Constant): - # simplified variable - sourceeids = None, (vref.eval(self.plan.args),) - var = vref - else: - var = vref.variable - for rel in var.stinfo['relations'] - var.stinfo['rhsrelations']: - # skip neged eid relation since it's the kind of query - # generated when clearing old value of '?1" relation, - # cw_source included. See - # unittest_ldapuser.test_copy_to_system_source - if rel.r_type == 'name' or \ - (rel.r_type == 'eid' and not rel.neged(strict=True)): - if rel.r_type == 'eid': - slist = sourceeids - else: - slist = sourcenames - sources = [cst.eval(self.plan.args) - for cst in rel.children[1].get_nodes(Constant)] - if sources: - if slist: - # don't attempt to do anything - sourcenames = sourceeids = None - break - slist[:] = (rel, sources) - if sourceeids: - rel, values = sourceeids - sourcesdict = self._repo.sources_by_eid - elif sourcenames: - rel, values = sourcenames - sourcesdict = self._repo.sources_by_uri - else: - sourcesdict = None - if sourcesdict is not None: - lhs = srel.children[0] - try: - sources = [sourcesdict[key] for key in values] - except KeyError: - raise BadRQLQuery('source conflict for term %s' % lhs.as_string()) - if isinstance(lhs, Constant): - source = self._session.source_from_eid(lhs.eval(self.plan.args)) - if not source in sources: - raise BadRQLQuery('source conflict for term %s' % lhs.as_string()) - else: - lhs = getattr(lhs, 'variable', lhs) - invariant = getattr(lhs, '_q_invariant', False) - # XXX NOT NOT - neged = srel.neged(traverse_scope=True) or (rel and rel.neged(strict=True)) - has_copy_based_source = False - sources_ = [] - for source in sources: - if source.copy_based_source: - has_copy_based_source = True - if not self.system_source in sources_: - sources_.append(self.system_source) - else: - sources_.append(source) - sources = sources_ - if neged: - for source in sources: - if invariant and source is self.system_source: - continue - self._remove_source_term(source, lhs) - self._discarded_sources.setdefault(lhs, set()).add(source) - usesys = self.system_source not in sources - else: - for source, terms in sourcesterms.items(): - if lhs in terms and not source in sources: - if invariant and source is self.system_source: - continue - self._remove_source_term(source, lhs) - self._discarded_sources.setdefault(lhs, set()).add(source) - usesys = self.system_source in sources - if rel is None or (len(var.stinfo['relations']) == 2 and - not var.stinfo['selected']): - self._remove_source_term(self.system_source, var) - if not (has_copy_based_source or len(sources) > 1 - or usesys or invariant): - if rel is None: - srel.parent.remove(srel) - else: - self.rqlst.undefine_variable(var) - self._remove_source_term(self.system_source, srel) - return termssources - - def _handle_cross_relation(self, rel, relsources, termssources): - for source in relsources: - if rel.r_type in source.cross_relations: - ssource = self.system_source - crossvars = set(x.variable for x in rel.get_nodes(VariableRef)) - for const in rel.get_nodes(Constant): - if source.uri != 'system' and not const in 
self._sourcesterms.get(source, ()): - continue - crossvars.add(const) - self._crossrelations.setdefault(source, {})[rel] = crossvars - if len(crossvars) < 2: - # this means there is a constant in the relation which is - # not supported by the source, so we can stop here - continue - self._sourcesterms.setdefault(ssource, {})[rel] = set(self._solindices) - solindices = None - for term in crossvars: - if len(termssources[term]) == 1 and iter(termssources[term]).next()[0].uri == 'system': - for ov in crossvars: - if ov is not term and (isinstance(ov, Constant) or ov._q_invariant): - ssset = frozenset((ssource,)) - self._remove_sources(ov, termssources[ov] - ssset) - break - if solindices is None: - solindices = set(sol for s, sol in termssources[term] - if s is source) - else: - solindices &= set(sol for s, sol in termssources[term] - if s is source) - else: - self._sourcesterms.setdefault(source, {})[rel] = solindices - - def _remove_invalid_sources(self, termssources): - """removes invalid sources from `sourcesterms` member according to - traversed relations and their properties (which sources support them, - can they cross sources, etc...) - """ - for term in self._linkedterms: - self._remove_sources_until_stable(term, termssources) - if len(self._sourcesterms) > 1 and hasattr(self.plan.rqlst, 'main_relations'): - # the querier doesn't annotate write queries, need to do it here - self.plan.annotate_rqlst() - # insert/update/delete queries, we may get extra information from - # the main relation (eg relations to the left of the WHERE - if self.plan.rqlst.TYPE == 'insert': - inserted = dict((vref.variable, etype) - for etype, vref in self.plan.rqlst.main_variables) - else: - inserted = {} - repo = self._repo - rschema = self._schema.rschema - for rel in self.plan.rqlst.main_relations: - if not rschema(rel.r_type).final: - # nothing to do if relation is not supported by multiple sources - if len(repo.rel_type_sources(rel.r_type)) < 2: - continue - lhs, rhs = rel.get_variable_parts() - try: - lhsv = self._extern_term(lhs, termssources, inserted) - rhsv = self._extern_term(rhs, termssources, inserted) - except KeyError: - continue - self._remove_term_sources(lhsv, rel, rhsv, termssources) - self._remove_term_sources(rhsv, rel, lhsv, termssources) - - def _extern_term(self, term, termssources, inserted): - var = term.variable - if var.stinfo['constnode']: - termv = var.stinfo['constnode'] - termssources[termv] = self._term_sources(termv) - elif var in inserted: - termv = var - source = self._repo.locate_etype_source(inserted[var]) - termssources[termv] = set((source, solindex) - for solindex in self._solindices) - else: - termv = self.rqlst.defined_vars[var.name] - if not termv in termssources: - termssources[termv] = self._term_sources(termv) - return termv - - def _remove_sources_until_stable(self, term, termssources): - sourcesterms = self._sourcesterms - for oterm, rel in self._linkedterms.get(term, ()): - tscope = ms_scope(term) - otscope = ms_scope(oterm) - rscope = ms_scope(rel) - if not tscope is otscope and rscope.neged(strict=True): - # can't get information from relation inside a NOT exists - # where terms don't belong to the same scope - continue - need_ancestor_scope = False - if not (tscope is rscope and otscope is rscope): - if rel.ored(): - continue - if rel.ored(traverse_scope=True): - # if relation has some OR as parent, constraints should only - # propagate from parent scope to child scope, nothing else - need_ancestor_scope = True - relsources = 
self._repo.rel_type_sources(rel.r_type) - if neged_relation(rel) and ( - len(relsources) < 2 - or not isinstance(oterm, Variable) - or oterm.valuable_references() != 1 - or any(sourcesterms[source][term] != sourcesterms[source][oterm] - for source in relsources - if term in sourcesterms.get(source, ()) - and oterm in sourcesterms.get(source, ()))): - # neged relation doesn't allow to infer term sources unless - # we're on a multisource relation for a term only used by this - # relation (eg "Any X WHERE NOT X multisource_rel Y" and over is - # Y) - continue - # compute invalid sources for terms and remove them - if not need_ancestor_scope or is_ancestor(tscope, otscope): - self._remove_term_sources(term, rel, oterm, termssources) - if not need_ancestor_scope or is_ancestor(otscope, tscope): - self._remove_term_sources(oterm, rel, term, termssources) - - def _remove_term_sources(self, term, rel, oterm, termssources): - """remove invalid sources for term according to oterm's sources and the - relation between those two terms. - """ - norelsup = self._norel_support_set(rel) - termsources = termssources[term] - invalid_sources = termsources - (termssources[oterm] | norelsup) - if invalid_sources and self._repo.can_cross_relation(rel.r_type): - invalid_sources -= self._sys_source_set - if invalid_sources and isinstance(term, Variable) \ - and self._need_ext_source_access(term, rel): - # if the term is a not invariant variable, we should filter out - # source where the relation is a cross relation from invalid - # sources - invalid_sources = frozenset((s, solidx) for s, solidx in invalid_sources - if not (s in self._crossrelations and - rel in self._crossrelations[s])) - if invalid_sources: - self._remove_sources(term, invalid_sources) - discarded = self._discarded_sources.get(term) - if discarded is not None and not any(x[0] for x in (termsources-invalid_sources) - if not x[0] in discarded): - raise BadRQLQuery('relation %s cant be crossed but %s and %s should ' - 'come from difference sources' % - (rel.r_type, term.as_string(), oterm.as_string())) - # if term is a rewritten const, we can apply the same changes to - # all other consts inserted from the same original variable - for const in self._const_vars.get(term, ()): - if const is not term: - self._remove_sources(const, invalid_sources) - termsources -= invalid_sources - self._remove_sources_until_stable(term, termssources) - if isinstance(oterm, Constant): - self._remove_sources(oterm, invalid_sources) - - def _compute_needsplit(self): - """tell according to sourcesterms if the rqlst has to be splitted for - execution among multiple sources - - the execution has to be split if - * a source support an entity (non invariant) but doesn't support a - relation on it - * a source support an entity which is accessed by an optional relation - * there is more than one source and either all sources'supported - variable/solutions are not equivalent or multiple variables have to - be fetched from some source - """ - # NOTE: < 2 since may be 0 on queries such as Any X WHERE X eid 2 - if len(self._sourcesterms) < 2: - self.needsplit = False - # if this is not the system source but we have only constant terms - # and no relation (other than eid), apply query on the system source - # - # testing for rqlst with nothing in vargraph nor defined_vars is the - # simplest way the check the condition explained below - if not self.system_source in self._sourcesterms and \ - not self.rqlst.defined_vars and \ - not need_source_access_relation(self.rqlst.vargraph): - 
self._sourcesterms = {self.system_source: {}} - elif not self.needsplit: - if not allequals(self._sourcesterms.itervalues()): - for source, terms in self._sourcesterms.iteritems(): - if source is self.system_source: - continue - if any(x for x in terms if not isinstance(x, Constant)): - self.needsplit = True - return - self._sourcesterms = {self.system_source: {}} - self.needsplit = False - else: - sample = self._sourcesterms.itervalues().next() - if len(sample) > 1: - for term in sample: - # need split if unlinked variable - if isinstance(term, Variable) and not term in self._linkedterms: - self.needsplit = True - break - else: - # need split if there are some cross relation on non - # invariant variable or if the variable is used in - # multi-sources relation - if self._crossrelations: - for reldict in self._crossrelations.itervalues(): - for rel, terms in reldict.iteritems(): - for term in terms: - if isinstance(term, Variable) \ - and self._need_ext_source_access(term, rel): - self.needsplit = True - return - else: - # remove sources only accessing to constant nodes - for source, terms in self._sourcesterms.items(): - if source is self.system_source: - continue - if not any(x for x in terms if not isinstance(x, Constant)): - del self._sourcesterms[source] - if len(self._sourcesterms) < 2: - self.needsplit = False - - @cached - def _need_ext_source_access(self, var, rel): - if not var._q_invariant: - return True - if any(r for x, r in self._linkedterms[var] - if not r is rel and self._repo.is_multi_sources_relation(r.r_type)): - return True - return False - - def _set_source_for_term(self, source, term): - self._sourcesterms.setdefault(source, {})[term] = set(self._solindices) - - def _term_sources(self, term): - """returns possible sources for terms `term`""" - if isinstance(term, Constant): - source = self._session.source_from_eid(term.eval(self.plan.args)) - return set((source, solindex) for solindex in self._solindices) - else: - var = getattr(term, 'variable', term) - sources = [source for source, varobjs in self.sourcesterms.iteritems() - if var in varobjs] - return set((source, solindex) for source in sources - for solindex in self.sourcesterms[source][var]) - - def _remove_sources(self, term, sources): - """removes invalid sources (`sources`) from `sourcesterms` - - :param sources: the list of sources to remove - :param term: the analyzed term - """ - sourcesterms = self._sourcesterms - for source, solindex in sources: - try: - sourcesterms[source][term].remove(solindex) - except KeyError: - import rql.base as rqlb - assert isinstance(term, (rqlb.BaseNode, Variable)), repr(term) - continue # may occur with subquery column alias - if not sourcesterms[source][term]: - self._remove_source_term(source, term) - - def _remove_source_term(self, source, term): - try: - poped = self._sourcesterms[source].pop(term, None) - except KeyError: - pass - else: - if not self._sourcesterms[source]: - del self._sourcesterms[source] - - def crossed_relation(self, source, relation): - return relation in self._crossrelations.get(source, ()) - - def part_steps(self): - """precompute necessary part steps before generating actual rql for - each step. This is necessary to know if an aggregate step will be - necessary or not. 
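For illustration, each computed step is a tuple shaped like (sources, terms, solindices, scope, needsel, final), as built below. A hypothetical two-source decomposition (the query and step contents here are illustrative only, not taken from a real plan) could look like::

    Any X WHERE X owned_by U, U login "bob"
    -> step 1: fetch X, U from the external source into a temporary table
    -> step 2: final step on the system source, joining against that table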
- """ - steps = [] - select = self.rqlst - rschema = self._schema.rschema - for source in self.part_sources: - try: - sourceterms = self._sourcesterms[source] - except KeyError: - continue # already proceed - while sourceterms: - # take a term randomly, and all terms supporting the - # same solutions - term, solindices = self._choose_term(source, sourceterms) - if source.uri == 'system': - # ensure all variables are available for the latest step - # (missing one will be available from temporary tables - # of previous steps) - scope = select - terms = scope.defined_vars.values() + scope.aliases.values() - sourceterms.clear() - sources = [source] - else: - scope = ms_scope(term) - # find which sources support the same term and solutions - sources = self._expand_sources(source, term, solindices) - # no try to get as much terms as possible - terms = self._expand_terms(term, sources, sourceterms, - scope, solindices) - if len(terms) == 1 and isinstance(terms[0], Constant): - # we can't generate anything interesting with a single - # constant term (will generate an empty "Any" query), - # go to the next iteration directly! - continue - if not sourceterms: - try: - del self._sourcesterms[source] - except KeyError: - # XXX already cleaned - pass - # set of terms which should be additionaly selected when - # possible - needsel = set() - if not self._sourcesterms and scope is select: - terms += scope.defined_vars.values() + scope.aliases.values() - if isinstance(term, Relation) and len(sources) > 1: - variants = set() - partterms = [term] - for vref in term.get_nodes(VariableRef): - if not vref.variable._q_invariant: - variants.add(vref.name) - if len(variants) == 2: - # we need an extra-step to fetch relations from each source - # before a join with prefetched inputs - # (see test_crossed_relation_noeid_needattr in - # unittest_msplanner / unittest_multisources) - lhs, rhs = term.get_variable_parts() - steps.append( (sources, [term, getattr(lhs, 'variable', lhs), - getattr(rhs, 'variable', rhs)], - solindices, scope, variants, False) ) - sources = [self.system_source] - final = True - else: - # suppose this is a final step until the contrary is proven - final = scope is select - # add attribute variables and mark variables which should be - # additionaly selected when possible - for var in select.defined_vars.itervalues(): - if not var in terms: - stinfo = var.stinfo - for ovar, rtype in stinfo.get('attrvars', ()): - if ovar in terms: - needsel.add(var.name) - terms.append(var) - break - else: - needsel.add(var.name) - final = False - # check all relations are supported by the sources - for rel in scope.iget_nodes(Relation): - if rel.is_types_restriction(): - continue - # take care not overwriting the existing "source" identifier - for _source in sources: - if not _source.support_relation(rel.r_type) or ( - self.crossed_relation(_source, rel) and not rel in terms): - for vref in rel.iget_nodes(VariableRef): - needsel.add(vref.name) - final = False - break - else: - if not scope is select: - self._exists_relation(rel, terms, needsel, source) - # if relation is supported by all sources and some of - # its lhs/rhs variable isn't in "terms", and the - # other end *is* in "terms", mark it have to be - # selected - if source.uri != 'system' and not rschema(rel.r_type).final: - lhs, rhs = rel.get_variable_parts() - try: - lhsvar = lhs.variable - except AttributeError: - lhsvar = lhs - try: - rhsvar = rhs.variable - except AttributeError: - rhsvar = rhs - try: - if lhsvar in terms and not rhsvar in terms: 
- needsel.add(lhsvar.name) - elif rhsvar in terms and not lhsvar in terms: - needsel.add(rhsvar.name) - except AttributeError: - continue # not an attribute, no selection needed - if final and source.uri != 'system': - # check rewritten constants - for vconsts in select.stinfo['rewritten'].itervalues(): - const = vconsts[0] - eid = const.eval(self.plan.args) - _source = self._session.source_from_eid(eid) - if len(sources) > 1 or not _source in sources: - # if there is some rewriten constant used by a not - # neged relation while there are some source not - # supporting the associated entity, this step can't - # be final (unless the relation is explicitly in - # `terms`, eg cross relations) - for c in vconsts: - rel = c.relation() - if rel is None or not (rel in terms or neged_relation(rel)): - final = False - break - break - if final: - self._cleanup_sourcesterms(sources, solindices) - steps.append((sources, terms, solindices, scope, needsel, final) - ) - if not steps[-1][-1]: - # add a final step - terms = select.defined_vars.values() + select.aliases.values() - steps.append( ([self.system_source], terms, set(self._solindices), - select, set(), True) ) - return steps - - def _exists_relation(self, rel, terms, needsel, source): - rschema = self._schema.rschema(rel.r_type) - lhs, rhs = rel.get_variable_parts() - try: - lhsvar, rhsvar = lhs.variable, rhs.variable - except AttributeError: - pass - else: - # supported relation with at least one end supported, check the - # other end is in as well. If not this usually means the - # variable is refed by an outer scope and should be substituted - # using an 'identity' relation (else we'll get a conflict of - # temporary tables) - relscope = ms_scope(rel) - lhsscope = ms_scope(lhsvar) - rhsscope = ms_scope(rhsvar) - if rhsvar in terms and not lhsvar in terms and lhsscope is lhsvar.stmt: - self._identity_substitute(rel, lhsvar, terms, needsel, relscope) - elif lhsvar in terms and not rhsvar in terms and rhsscope is rhsvar.stmt: - self._identity_substitute(rel, rhsvar, terms, needsel, relscope) - elif self.crossed_relation(source, rel): - if lhsscope is not relscope: - self._identity_substitute(rel, lhsvar, terms, needsel, - relscope, lhsscope) - if rhsscope is not relscope: - self._identity_substitute(rel, rhsvar, terms, needsel, - relscope, rhsscope) - - def _identity_substitute(self, relation, var, terms, needsel, exist, - idrelscope=None): - newvar = self._insert_identity_variable(exist, var, idrelscope) - # ensure relation is using '=' operator, else we rely on a - # sqlgenerator side effect (it won't insert an inequality operator - # in this case) - relation.children[1].operator = '=' - terms.append(newvar) - needsel.add(newvar.name) - - def _choose_term(self, source, sourceterms): - """pick one term among terms supported by a source, which will be used - as a base to generate an execution step - """ - secondchoice = None - if len(self._sourcesterms) > 1: - # first, return non invariant variable of crossed relation, then the - # crossed relation itself - for term in sourceterms: - if (isinstance(term, Relation) - and self.crossed_relation(source, term) - and not ms_scope(term) is self.rqlst): - for vref in term.get_variable_parts(): - try: - var = vref.variable - except AttributeError: - # Constant - continue - if ((len(var.stinfo['relations']) > 1 or var.stinfo['selected']) - and var in sourceterms): - return var, sourceterms.pop(var) - return term, sourceterms.pop(term) - # priority to variable from subscopes - for term in sourceterms: - if 
not ms_scope(term) is self.rqlst: - if isinstance(term, Variable): - return term, sourceterms.pop(term) - secondchoice = term - else: - # priority to variable from outer scope - for term in sourceterms: - if ms_scope(term) is self.rqlst: - if isinstance(term, Variable): - return term, sourceterms.pop(term) - secondchoice = term - if secondchoice is not None: - return secondchoice, sourceterms.pop(secondchoice) - # priority to variable with the less solutions supported and with the - # most valuable refs. Add variable name for test predictability - variables = sorted([(var, sols) for (var, sols) in sourceterms.items() - if isinstance(var, Variable)], - key=lambda (v, s): (len(s), -v.valuable_references(), v.name)) - if variables: - var = variables[0][0] - return var, sourceterms.pop(var) - # priority to constant - for term in sourceterms: - if isinstance(term, Constant): - return term, sourceterms.pop(term) - # whatever (relation) - term = iter(sourceterms).next() - return term, sourceterms.pop(term) - - def _expand_sources(self, selected_source, term, solindices): - """return all sources supporting given term / solindices""" - sources = [selected_source] - sourcesterms = self._sourcesterms - for source in list(sourcesterms): - if source is selected_source: - continue - if not (term in sourcesterms[source] and - solindices.issubset(sourcesterms[source][term])): - continue - sources.append(source) - if source.uri != 'system' or not (isinstance(term, Variable) and not term in self._linkedterms): - termsolindices = sourcesterms[source][term] - termsolindices -= solindices - if not termsolindices: - del sourcesterms[source][term] - if not sourcesterms[source]: - del sourcesterms[source] - return sources - - def _expand_terms(self, term, sources, sourceterms, scope, solindices): - terms = [term] - sources = sorted(sources) - sourcesterms = self._sourcesterms - linkedterms = self._linkedterms - # term has to belong to the same scope if there is more - # than the system source remaining - if len(sourcesterms) > 1 and not scope is self.rqlst: - candidates = (t for t in sourceterms if scope is ms_scope(t)) - else: - candidates = sourceterms - # we only want one unlinked term in each generated query - candidates = [t for t in candidates - if isinstance(t, (Constant, Relation)) or - (solindices.issubset(sourceterms[t]) and t in linkedterms)] - cross_rels = {} - for source in sources: - cross_rels.update(self._crossrelations.get(source, {})) - exclude = {} - for crossvars in cross_rels.itervalues(): - vars = [t for t in crossvars if isinstance(t, Variable)] - try: - exclude[vars[0]] = vars[1] - exclude[vars[1]] = vars[0] - except IndexError: - pass - accept_term = lambda x: (not any(s for s in sources - if not x in sourcesterms.get(s, ())) - and x._ms_may_be_processed(terms, linkedterms) - and not exclude.get(x) in terms) - if isinstance(term, Relation) and term in cross_rels: - cross_terms = cross_rels.pop(term) - base_accept_term = accept_term - accept_term = lambda x: (base_accept_term(x) or x in cross_terms) - for refed in cross_terms: - if not refed in candidates: - terms.append(refed) - # repeat until no term can't be added, since addition of a new - # term may permit to another one to be added - modified = True - while modified and candidates: - modified = False - for term in candidates[:]: - if isinstance(term, Constant): - termsources = set(x[0] for x in self._term_sources(term)) - # ensure system source is there for constant - if self.system_source in sources: - 
termsources.add(self.system_source) - if sorted(termsources) != sources: - continue - terms.append(term) - candidates.remove(term) - modified = True - del sourceterms[term] - elif accept_term(term): - terms.append(term) - candidates.remove(term) - modified = True - self._cleanup_sourcesterms(sources, solindices, term) - return terms - - def _cleanup_sourcesterms(self, sources, solindices, term=None): - """remove solutions so we know they are already processed""" - for source in sources: - try: - sourceterms = self._sourcesterms[source] - except KeyError: - continue - if term is None: - for term, termsolindices in sourceterms.items(): - if isinstance(term, Relation) and self.crossed_relation(source, term): - continue - termsolindices -= solindices - if not termsolindices: - del sourceterms[term] - else: - try: - sourceterms[term] -= solindices - if not sourceterms[term]: - del sourceterms[term] - except KeyError: - pass - #assert term in cross_terms - if not sourceterms: - del self._sourcesterms[source] - - def merge_input_maps(self, allsolindices, complete=True): - """inputmaps is a dictionary with tuple of solution indices as key with - an associated input map as value. This function compute for each - solution its necessary input map and return them grouped - - ex: - inputmaps = {(0, 1, 2): {'A': 't1.login1', 'U': 't1.C0', 'U.login': 't1.login1'}, - (1,): {'X': 't2.C0', 'T': 't2.C1'}} - return : [([1], {'A': 't1.login1', 'U': 't1.C0', 'U.login': 't1.login1', - 'X': 't2.C0', 'T': 't2.C1'}), - ([0,2], {'A': 't1.login1', 'U': 't1.C0', 'U.login': 't1.login1'})] - """ - if not self._inputmaps: - return [(allsolindices, None)] - _allsolindices = allsolindices.copy() - mapbysol = {} - # compute a single map for each solution - for solindices, basemap in self._inputmaps.iteritems(): - for solindex in solindices: - if not (complete or solindex in allsolindices): - continue - solmap = mapbysol.setdefault(solindex, {}) - solmap.update(basemap) - try: - _allsolindices.remove(solindex) - except KeyError: - continue # already removed - # group results by identical input map - result = [] - for solindex, solmap in mapbysol.iteritems(): - for solindices, commonmap in result: - if commonmap == solmap: - solindices.append(solindex) - break - else: - result.append( ([solindex], solmap) ) - if _allsolindices: - result.append( (list(_allsolindices), None) ) - return result - - def build_final_part(self, select, solindices, inputmap, sources, - insertedvars): - solutions = [self._solutions[i] for i in solindices] - if self._conflicts and inputmap: - for varname, mappedto in self._conflicts: - var = select.defined_vars[varname] - newvar = select.make_variable() - # XXX should use var.scope but scope hasn't been computed yet - select.add_relation(var, 'identity', newvar) - for sol in solutions: - sol[newvar.name] = sol[varname] - inputmap[newvar.name] = mappedto - rqlst = self.plan.finalize(select, solutions, insertedvars) - if self.temptable is None and self.finaltable is None: - return OneFetchStep(self.plan, rqlst, sources, inputmap=inputmap) - table = self.temptable or self.finaltable - return FetchStep(self.plan, rqlst, sources, table, True, inputmap) - - def build_non_final_part(self, select, solindices, sources, insertedvars, - table): - """non final step, will have to store results in a temporary table""" - inputmapkey = tuple(sorted(solindices)) - solutions = [self._solutions[i] for i in solindices] - # XXX be smarter vs rql comparison - idx_key = (select.as_string(), inputmapkey, - 
tuple(sorted(sources)), tuple(sorted(insertedvars))) - try: - # if a similar step has already been process, simply backport its - # input map - step = self.plan.ms_steps_idx[idx_key] - except KeyError: - # processing needed - rqlst = self.plan.finalize(select, solutions, insertedvars) - step = FetchStep(self.plan, rqlst, sources, table, False) - self.plan.ms_steps_idx[idx_key] = step - self.plan.add_step(step) - # update input map for following steps, according to processed solutions - inputmap = self._inputmaps.setdefault(inputmapkey, {}) - for varname, mapping in step.outputmap.iteritems(): - if varname in inputmap and not '.' in varname and \ - not (mapping == inputmap[varname] or - self._schema.eschema(solutions[0][varname]).final): - self._conflicts.append((varname, inputmap[varname])) - inputmap.update(step.outputmap) - - -@deprecated('[3.18] old multi-source system will go away in the next version') -class MSPlanner(SSPlanner): - """MultiSourcesPlanner: build execution plan for rql queries - - decompose the RQL query according to sources'schema - """ - - def build_select_plan(self, plan, rqlst): - """build execution plan for a SELECT RQL query - - the rqlst should not be tagged at this point - """ - # preprocess deals with security insertion and returns a new syntax tree - # which have to be executed to fulfill the query: according - # to permissions for variable's type, different rql queries may have to - # be executed - plan.preprocess(rqlst) - if server.DEBUG & server.DBG_MS: - print '-'*80 - print 'PLANNING', rqlst - ppis = [PartPlanInformation(plan, select, self.rqlhelper) - for select in rqlst.children] - plan.ms_steps_idx = {} - steps = self._union_plan(plan, ppis) - if server.DEBUG & server.DBG_MS: - from pprint import pprint - for step in plan.steps: - pprint(step.test_repr()) - pprint(steps[0].test_repr()) - return steps - - def _ppi_subqueries(self, ppi): - # part plan info for subqueries - plan = ppi.plan - inputmap = {} - for subquery in ppi.rqlst.with_[:]: - sppis = [PartPlanInformation(plan, select) - for select in subquery.query.children] - for sppi in sppis: - if sppi.needsplit or sppi.part_sources != ppi.part_sources: - temptable = plan.make_temp_table_name('T%s' % make_uid(id(subquery))) - sstep = self._union_plan(plan, sppis, temptable)[0] - break - else: - sstep = None - if sstep is not None: - ppi.rqlst.with_.remove(subquery) - for i, colalias in enumerate(subquery.aliases): - inputmap[colalias.name] = '%s.C%s' % (temptable, i) - ppi.plan.add_step(sstep) - return inputmap - - def _union_plan(self, plan, ppis, temptable=None): - tosplit, cango, allsources = [], {}, set() - for planinfo in ppis: - if planinfo.needsplit: - tosplit.append(planinfo) - else: - cango.setdefault(planinfo.part_sources, []).append(planinfo) - for source in planinfo.part_sources: - allsources.add(source) - # first add steps for query parts which doesn't need to splitted - steps = [] - for sources, cppis in cango.iteritems(): - byinputmap = {} - for ppi in cppis: - select = ppi.rqlst - if sources != (ppi.system_source,): - add_types_restriction(self.schema, select) - # part plan info for subqueries - inputmap = self._ppi_subqueries(ppi) - aggrstep = need_aggr_step(select, sources) - if aggrstep: - atemptable = plan.make_temp_table_name('T%s' % make_uid(id(select))) - sunion = Union() - sunion.append(select) - selected = select.selection[:] - select_group_sort(select) - step = AggrStep(plan, selected, select, atemptable, temptable) - step.set_limit_offset(select.limit, select.offset) - 
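# Editorial aside: the two resets just below are the point of the hand-off --
# each per-source FetchStep must run unwindowed (a single source only sees
# part of the data), so LIMIT/OFFSET is applied exactly once by the
# aggregation step. A minimal standalone sketch of that rule (function and
# argument names are illustrative, not CubicWeb API):
def _apply_window_once(merged_rows, limit=None, offset=0):
    """apply LIMIT/OFFSET a single time, after partial results are merged"""
    end = None if limit is None else offset + limit
    return merged_rows[offset:end]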
select.limit = None - select.offset = 0 - fstep = FetchStep(plan, sunion, sources, atemptable, True, inputmap) - step.children.append(fstep) - steps.append(step) - else: - byinputmap.setdefault(tuple(inputmap.iteritems()), []).append( (select) ) - for inputmap, queries in byinputmap.iteritems(): - inputmap = dict(inputmap) - sunion = Union() - for select in queries: - sunion.append(select) - if temptable: - steps.append(FetchStep(plan, sunion, sources, temptable, True, inputmap)) - else: - steps.append(OneFetchStep(plan, sunion, sources, inputmap)) - # then add steps for split query parts - for planinfo in tosplit: - steps.append(self.split_part(planinfo, temptable)) - if len(steps) > 1: - if temptable: - step = UnionFetchStep(plan) - else: - step = UnionStep(plan) - step.children = steps - return (step,) - return steps - - # internal methods for multisources decomposition ######################### - - def split_part(self, ppi, temptable): - ppi.finaltable = temptable - plan = ppi.plan - select = ppi.rqlst - subinputmap = self._ppi_subqueries(ppi) - stepdefs = ppi.part_steps() - if need_aggr_step(select, ppi.part_sources, stepdefs): - atemptable = plan.make_temp_table_name('T%s' % make_uid(id(select))) - selection = select.selection[:] - select_group_sort(select) - else: - atemptable = None - selection = select.selection - ppi.temptable = atemptable - vfilter = TermsFiltererVisitor(self.schema, ppi) - steps = [] - multifinal = len([x for x in stepdefs if x[-1]]) >= 2 - for sources, terms, solindices, scope, needsel, final in stepdefs: - # extract an executable query using only the specified terms - if sources[0].uri == 'system': - # in this case we have to merge input maps before calling - # filter so already processed restrictions are correctly - # removed - solsinputmaps = ppi.merge_input_maps( - solindices, complete=not (final and multifinal)) - for solindices, inputmap in solsinputmaps: - minrqlst, insertedvars = vfilter.filter( - sources, terms, scope, set(solindices), needsel, final) - if inputmap is None: - inputmap = subinputmap - else: - inputmap.update(subinputmap) - steps.append(ppi.build_final_part(minrqlst, solindices, inputmap, - sources, insertedvars)) - else: - # this is a final part (i.e. retrieving results for the - # original query part) if all terms / sources have been - # treated or if this is the last shot for used solutions - minrqlst, insertedvars = vfilter.filter( - sources, terms, scope, solindices, needsel, final) - if final: - solsinputmaps = ppi.merge_input_maps( - solindices, complete=not (final and multifinal)) - if len(solsinputmaps) > 1: - refrqlst = minrqlst - for solindices, inputmap in solsinputmaps: - if inputmap is None: - inputmap = subinputmap - else: - inputmap.update(subinputmap) - if len(solsinputmaps) > 1: - minrqlst = refrqlst.copy() - sources = sources[:] - if inputmap and len(sources) > 1: - sources.remove(ppi.system_source) - steps.append(ppi.build_final_part(minrqlst, solindices, None, - sources, insertedvars)) - steps.append(ppi.build_final_part(minrqlst, solindices, inputmap, - [ppi.system_source], insertedvars)) - else: - steps.append(ppi.build_final_part(minrqlst, solindices, inputmap, - sources, insertedvars)) - else: - table = plan.make_temp_table_name('T%s' % make_uid(id(select))) - ppi.build_non_final_part(minrqlst, solindices, sources, - insertedvars, table) - # finally: join parts, deal with aggregate/group/sort if necessary - if atemptable is not None: - step = AggrStep(plan, selection, select, atemptable, temptable) - step.children = steps - elif len(steps) > 1: - getrschema = self.schema.rschema - if need_intersect(select, getrschema) or any(need_intersect(select, getrschema) - for step in steps - for select in step.union.children): - if temptable: - raise NotImplementedError('oops') # IntersectFetchStep(plan) - else: - step = IntersectStep(plan) - else: - if temptable: - step = UnionFetchStep(plan) - else: - step = UnionStep(plan) - step.children = steps - else: - step = steps[0] - if select.limit is not None or select.offset: - step.set_limit_offset(select.limit, select.offset) - return step - - -class UnsupportedBranch(Exception): - pass - - -class TermsFiltererVisitor(object): - def __init__(self, schema, ppi): - self.schema = schema - self.ppi = ppi - self.skip = {} - self.hasaggrstep = self.ppi.temptable - self.extneedsel = frozenset(vref.name for sortterm in ppi.rqlst.orderby - for vref in sortterm.iget_nodes(VariableRef)) - - def _rqlst_accept(self, rqlst, node, newroot, terms, setfunc=None): - try: - newrestr, node_ = node.accept(self, newroot, terms[:]) - except UnsupportedBranch: - return rqlst - if setfunc is not None and newrestr is not None: - setfunc(newrestr) - if not node_ is node: - rqlst = node.parent - return rqlst - - def filter(self, sources, terms, rqlst, solindices, needsel, final): - if server.DEBUG & server.DBG_MS: - print 'filter', final and 'final' or '', sources, terms, rqlst, solindices, needsel - newroot = Select() - self.sources = sorted(sources) - self.terms = terms - self.solindices = solindices - self.final = final - self._pending_vrefs = [] - # terms which appear in unsupported branches - needsel |= self.extneedsel - self.needsel = needsel - # terms which appear in supported branches - self.mayneedsel = set() - # new inserted variables - self.insertedvars = [] - # other structures (XXX document) - self.mayneedvar, self.hasvar = {}, {} - self.use_only_defined = False - self.scopes = {rqlst: newroot} - self.current_scope = rqlst - if rqlst.where: - rqlst = self._rqlst_accept(rqlst, rqlst.where, newroot, terms, - newroot.set_where) - if isinstance(rqlst, Select): - self.use_only_defined = True - if rqlst.groupby: - groupby = [] - for node in rqlst.groupby: - rqlst = self._rqlst_accept(rqlst, node, 
newroot, terms, - groupby.append) - if groupby: - newroot.set_groupby(groupby) - if rqlst.having: - having = [] - for node in rqlst.having: - rqlst = self._rqlst_accept(rqlst, node, newroot, terms, - having.append) - if having: - newroot.set_having(having) - if final and rqlst.orderby and not self.hasaggrstep: - orderby = [] - for node in rqlst.orderby: - rqlst = self._rqlst_accept(rqlst, node, newroot, terms, - orderby.append) - if orderby: - newroot.set_orderby(orderby) - elif rqlst.orderby: - for sortterm in rqlst.orderby: - if any(f for f in sortterm.iget_nodes(Function) if f.name == 'FTIRANK'): - newnode, oldnode = sortterm.accept(self, newroot, terms) - if newnode is not None: - newroot.add_sort_term(newnode) - self.process_selection(newroot, terms, rqlst) - elif not newroot.where: - # no restrictions have been copied, just select terms and add - # type restriction (done later by add_types_restriction) - for v in terms: - if not isinstance(v, Variable): - continue - newroot.append_selected(VariableRef(newroot.get_variable(v.name))) - solutions = self.ppi.copy_solutions(solindices) - cleanup_solutions(newroot, solutions) - newroot.set_possible_types(solutions) - if final: - if self.hasaggrstep: - self.add_necessary_selection(newroot, self.mayneedsel & self.extneedsel) - newroot.distinct = rqlst.distinct - else: - self.add_necessary_selection(newroot, self.mayneedsel & self.needsel) - # insert vars to fetch constant values when needed - for (varname, rschema), reldefs in self.mayneedvar.iteritems(): - for rel, ored in reldefs: - if not (varname, rschema) in self.hasvar: - self.hasvar[(varname, rschema)] = None # just to avoid further insertion - cvar = newroot.make_variable() - for sol in newroot.solutions: - sol[cvar.name] = rschema.objects(sol[varname])[0] - # if the current restriction is not used in a OR branch, - # we can keep it, else we have to drop the constant - # restriction (or we may miss some results) - if not ored: - rel = rel.copy(newroot) - newroot.add_restriction(rel) - # add a relation to link the variable - newroot.remove_node(rel.children[1]) - cmp = Comparison('=') - rel.append(cmp) - cmp.append(VariableRef(cvar)) - self.insertedvars.append((varname, rschema, cvar.name)) - newroot.append_selected(VariableRef(newroot.get_variable(cvar.name))) - # NOTE: even if the restriction is done by this query, we have - # to let it in the original rqlst so that it appears anyway in - # the "final" query, else we may change the meaning of the query - # if there are NOT somewhere : - # 'NOT X relation Y, Y name "toto"' means X WHERE X isn't related - # to Y whose name is toto while - # 'NOT X relation Y' means X WHERE X has no 'relation' (whatever Y) - elif ored: - newroot.remove_node(rel) - add_types_restriction(self.schema, rqlst, newroot, solutions) - if server.DEBUG & server.DBG_MS: - print '--->', newroot - return newroot, self.insertedvars - - def visit_and(self, node, newroot, terms): - subparts = [] - for i in xrange(len(node.children)): - child = node.children[i] - try: - newchild, child_ = child.accept(self, newroot, terms) - if not child_ is child: - node = child_.parent - if newchild is None: - continue - subparts.append(newchild) - except UnsupportedBranch: - continue - if not subparts: - return None, node - if len(subparts) == 1: - return subparts[0], node - return copy_node(newroot, node, subparts), node - - visit_or = visit_and - - def _relation_supported(self, relation): - rtype = relation.r_type - for source in self.sources: - if not 
source.support_relation(rtype) or ( - rtype in source.cross_relations and not relation in self.terms): - return False - if not self.final and not relation in self.terms: - rschema = self.schema.rschema(relation.r_type) - if not rschema.final: - for term in relation.get_nodes((VariableRef, Constant)): - term = getattr(term, 'variable', term) - termsources = sorted(set(x[0] for x in self.ppi._term_sources(term))) - if termsources and termsources != self.sources: - return False - return True - - def visit_relation(self, node, newroot, terms): - if not node.is_types_restriction(): - if not node in terms and node in self.skip and self.solindices.issubset(self.skip[node]): - return None, node - if not self._relation_supported(node): - raise UnsupportedBranch() - # don't copy type restriction unless this is the only supported relation - # for the lhs variable, else they'll be reinserted later as needed (in - # other cases we may copy a type restriction while the variable is not - # actually used) - elif not (node.neged(strict=True) or - any(self._relation_supported(rel) - for rel in node.children[0].variable.stinfo['relations'])): - return self.visit_default(node, newroot, terms) - else: - raise UnsupportedBranch() - rschema = self.schema.rschema(node.r_type) - self._pending_vrefs = [] - try: - res = self.visit_default(node, newroot, terms)[0] - except Exception: - # when a relation isn't supported, we should dereference potentially - # introduced variable refs - for vref in self._pending_vrefs: - vref.unregister_reference() - raise - ored = node.ored() - if rschema.final or rschema.inlined: - vrefs = node.children[1].get_nodes(VariableRef) - if not vrefs: - if not ored: - self.skip.setdefault(node, set()).update(self.solindices) - else: - self.mayneedvar.setdefault((node.children[0].name, rschema), []).append( (res, ored) ) - else: - assert len(vrefs) == 1 - vref = vrefs[0] - # XXX check operator ? 
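# Editorial aside: the filterer above and below relies on rql's visitor
# protocol, where node.accept(visitor, ...) double-dispatches to a
# visit_<nodetype> method such as visit_relation. A minimal sketch of that
# shape (the class is illustrative, not the real rql node API):
class ExampleNode(object):
    """illustrative node; real rql nodes implement a similar accept()"""
    def accept(self, visitor, *args):
        name = self.__class__.__name__.lower()  # e.g. 'relation'
        return getattr(visitor, 'visit_' + name)(self, *args)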
- self.hasvar[(node.children[0].name, rschema)] = vref - if self._may_skip_attr_rel(rschema, node, vref, ored, terms, res): - self.skip.setdefault(node, set()).update(self.solindices) - elif not ored: - self.skip.setdefault(node, set()).update(self.solindices) - return res, node - - def _may_skip_attr_rel(self, rschema, rel, vref, ored, terms, res): - var = vref.variable - if ored: - return False - if var.name in self.extneedsel or var.stinfo['selected']: - return False - if not var in terms or used_in_outer_scope(var, self.current_scope): - return False - if any(v for v, _ in var.stinfo.get('attrvars', ()) if not v in terms): - return False - return True - - def visit_exists(self, node, newroot, terms): - newexists = node.__class__() - self.scopes = {node: newexists} - subparts, node = self._visit_children(node, newroot, terms) - if not subparts: - return None, node - newexists.set_where(subparts[0]) - return newexists, node - - def visit_not(self, node, newroot, terms): - subparts, node = self._visit_children(node, newroot, terms) - if not subparts: - return None, node - return copy_node(newroot, node, subparts), node - - def visit_group(self, node, newroot, terms): - if not self.final: - return None, node - return self.visit_default(node, newroot, terms) - - def visit_variableref(self, node, newroot, terms): - if self.use_only_defined: - if not node.variable.name in newroot.defined_vars: - raise UnsupportedBranch(node.name) - elif not node.variable in terms: - raise UnsupportedBranch(node.name) - self.mayneedsel.add(node.name) - # set scope so we can insert types restriction properly - newvar = newroot.get_variable(node.name) - newvar.stinfo['scope'] = self.scopes.get(node.variable.scope, newroot) - vref = VariableRef(newvar) - self._pending_vrefs.append(vref) - return vref, node - - def visit_constant(self, node, newroot, terms): - return copy_node(newroot, node), node - - def visit_comparison(self, node, newroot, terms): - subparts, node = self._visit_children(node, newroot, terms) - copy = copy_node(newroot, node, subparts) - # ignore comparison operator when fetching non final query - if not self.final and isinstance(node.children[0], VariableRef): - copy.operator = '=' - return copy, node - - def visit_function(self, node, newroot, terms): - if node.name == 'FTIRANK': - # FTIRANK is somewhat special... 
Rank function should be included in - # the same query has the has_text relation, potentially added to - # selection for latter usage - if not self.hasaggrstep and self.final and node not in self.skip: - return self.visit_default(node, newroot, terms) - elif any(s for s in self.sources if s.uri != 'system'): - return None, node - # p = node.parent - # while p is not None and not isinstance(p, SortTerm): - # p = p.parent - # if isinstance(p, SortTerm): - if not self.hasaggrstep and self.final and node in self.skip: - return Constant(self.skip[node], 'Int'), node - # XXX only if not yet selected - newroot.append_selected(node.copy(newroot)) - self.skip[node] = len(newroot.selection) - return None, node - return self.visit_default(node, newroot, terms) - - def visit_default(self, node, newroot, terms): - subparts, node = self._visit_children(node, newroot, terms) - return copy_node(newroot, node, subparts), node - - visit_mathexpression = visit_constant = visit_default - - def visit_sortterm(self, node, newroot, terms): - subparts, node = self._visit_children(node, newroot, terms) - if not subparts: - return None, node - return copy_node(newroot, node, subparts), node - - def _visit_children(self, node, newroot, terms): - subparts = [] - for i in xrange(len(node.children)): - child = node.children[i] - newchild, child_ = child.accept(self, newroot, terms) - if not child is child_: - node = child_.parent - if newchild is not None: - subparts.append(newchild) - return subparts, node - - def process_selection(self, newroot, terms, rqlst): - if self.final: - for term in rqlst.selection: - newroot.append_selected(term.copy(newroot)) - for vref in term.get_nodes(VariableRef): - self.needsel.add(vref.name) - return - for term in rqlst.selection: - vrefs = term.get_nodes(VariableRef) - if vrefs: - supportedvars = [] - for vref in vrefs: - var = vref.variable - if var in terms: - supportedvars.append(vref) - continue - else: - self.needsel.add(vref.name) - break - else: - for vref in vrefs: - newroot.append_selected(vref.copy(newroot)) - supportedvars = [] - for vref in supportedvars: - if not vref in newroot.get_selected_variables(): - newroot.append_selected(VariableRef(newroot.get_variable(vref.name))) - elif term in self.terms: - newroot.append_selected(term.copy(newroot)) - - def add_necessary_selection(self, newroot, terms): - selected = tuple(newroot.get_selected_variables()) - for varname in terms: - var = newroot.defined_vars[varname] - for vref in var.references(): - rel = vref.relation() - if rel is None and vref in selected: - # already selected - break - else: - selvref = VariableRef(var) - newroot.append_selected(selvref) - if newroot.groupby: - newroot.add_group_var(VariableRef(selvref.variable, noautoref=1)) diff -r 84738d495ffd -r 793377697c81 server/mssteps.py --- a/server/mssteps.py Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,309 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""Defines the different querier steps usable in plans. - -FIXME: this code needs refactoring. Some problems: -* get data from the parent plan, the latest step, temporary table... -* each step has its own members (this is not necessarily bad, but a bit messy - for now) -""" -__docformat__ = "restructuredtext en" - -from rql.nodes import VariableRef, Variable, Function - -from cubicweb.server.ssplanner import (LimitOffsetMixIn, Step, OneFetchStep, - varmap_test_repr, offset_result) - -AGGR_TRANSFORMS = {'COUNT':'SUM', 'MIN':'MIN', 'MAX':'MAX', 'SUM': 'SUM'} - -class remove_and_restore_clauses(object): - def __init__(self, union, keepgroup): - self.union = union - self.keepgroup = keepgroup - self.clauses = None - - def __enter__(self): - self.clauses = clauses = [] - for select in self.union.children: - if self.keepgroup: - having, orderby = select.having, select.orderby - select.having, select.orderby = (), () - clauses.append( (having, orderby) ) - else: - groupby, having, orderby = select.groupby, select.having, select.orderby - select.groupby, select.having, select.orderby = (), (), () - clauses.append( (groupby, having, orderby) ) - - def __exit__(self, exctype, exc, traceback): - for i, select in enumerate(self.union.children): - if self.keepgroup: - select.having, select.orderby = self.clauses[i] - else: - select.groupby, select.having, select.orderby = self.clauses[i] - - -class FetchStep(OneFetchStep): - """step consisting in fetching data from sources, and storing result in - a temporary table - """ - def __init__(self, plan, union, sources, table, keepgroup, inputmap=None): - OneFetchStep.__init__(self, plan, union, sources) - # temporary table to store step result - self.table = table - # should groupby clause be kept or not - self.keepgroup = keepgroup - # variables mapping to use as input - self.inputmap = inputmap - # output variable mapping - srqlst = union.children[0] # sample select node - # add additional information to the output mapping - self.outputmap = plan.init_temp_table(table, srqlst.selection, - srqlst.solutions[0]) - for vref in srqlst.selection: - if not isinstance(vref, VariableRef): - continue - var = vref.variable - if var.stinfo.get('attrvars'): - for lhsvar, rtype in var.stinfo['attrvars']: - if lhsvar.name in srqlst.defined_vars: - key = '%s.%s' % (lhsvar.name, rtype) - self.outputmap[key] = self.outputmap[var.name] - else: - rschema = self.plan.schema.rschema - for rel in var.stinfo['rhsrelations']: - if rschema(rel.r_type).inlined: - lhsvar = rel.children[0] - if lhsvar.name in srqlst.defined_vars: - key = '%s.%s' % (lhsvar.name, rel.r_type) - self.outputmap[key] = self.outputmap[var.name] - - def execute(self): - """execute this step""" - self.execute_children() - plan = self.plan - plan.create_temp_table(self.table) - union = self.union - with remove_and_restore_clauses(union, self.keepgroup): - for source in self.sources: - source.flying_insert(self.table, plan.session, union, plan.args, - self.inputmap) - - def mytest_repr(self): - """return a representation of this step suitable for test""" - with remove_and_restore_clauses(self.union, self.keepgroup): - try: - inputmap = varmap_test_repr(self.inputmap, self.plan.tablesinorder) - outputmap = varmap_test_repr(self.outputmap, self.plan.tablesinorder) - except AttributeError: - inputmap = self.inputmap - outputmap = self.outputmap - return (self.__class__.__name__, - sorted((r.as_string(kwargs=self.plan.args), r.solutions) - for r in self.union.children), - sorted(self.sources), inputmap, outputmap) - - -class AggrStep(LimitOffsetMixIn, Step): - """step consisting in making aggregates from temporary data in the system - source - """ - def __init__(self, plan, selection, select, table, outputtable=None): - Step.__init__(self, plan) - # original selection - self.selection = selection - # original Select RQL tree - self.select = select - # table where temporary results are located - self.table = table - # optional table where to write results - self.outputtable = outputtable - if outputtable is not None: - plan.init_temp_table(outputtable, selection, select.solutions[0]) - - #self.inputmap = inputmap - - def mytest_repr(self): - """return a representation of this step suitable for test""" - try: - # rely on a monkey patch (cf unittest_querier) - table = self.plan.tablesinorder[self.table] - outputtable = self.outputtable and self.plan.tablesinorder[self.outputtable] - except AttributeError: - # not monkey patched - table = self.table - outputtable = self.outputtable - sql = self.get_sql().replace(self.table, table) - return (self.__class__.__name__, sql, outputtable) - - def execute(self): - """execute this step""" - self.execute_children() - sql = self.get_sql() - if self.outputtable: - self.plan.create_temp_table(self.outputtable) - sql = 'INSERT INTO %s %s' % (self.outputtable, sql) - self.plan.syssource.doexec(self.plan.session, sql, self.plan.args) - else: - return self.plan.sqlexec(sql, self.plan.args) - - def get_sql(self): - self.inputmap = inputmap = self.children[-1].outputmap - dbhelper = self.plan.syssource.dbhelper - # get the select clause - clause = [] - for i, term in enumerate(self.selection): - try: - var_name = inputmap[term.as_string()] - except KeyError: - var_name = 'C%s' % i - if isinstance(term, Function): - # we have to translate some aggregate functions - # (for instance COUNT -> SUM) - orig_name = term.name - try: - term.name = AGGR_TRANSFORMS[term.name] - # backup and reduce children - orig_children = term.children - term.children = [VariableRef(Variable(var_name))] - clause.append(term.accept(self)) - # restore the tree XXX necessary? 
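# Editorial aside: AGGR_TRANSFORMS (defined at the top of this module)
# encodes the standard rule for re-aggregating partial results: per-source
# COUNTs must be recombined with SUM, while MIN/MAX/SUM recombine with
# themselves (AVG, notably, has no such simple transform and is absent from
# the map). A self-contained illustration with three hypothetical sources:
partial_counts = [3, 5, 2]        # COUNT(X) as computed by each source
assert sum(partial_counts) == 10  # recombined with SUM, not COUNT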
- term.name = orig_name - term.children = orig_children - except KeyError: - clause.append(var_name) - else: - clause.append(var_name) - for vref in term.iget_nodes(VariableRef): - inputmap[vref.name] = var_name - # XXX handle distinct with non selected sort term - if self.select.distinct: - sql = ['SELECT DISTINCT %s' % ', '.join(clause)] - else: - sql = ['SELECT %s' % ', '.join(clause)] - sql.append("FROM %s" % self.table) - # get the group/having clauses - if self.select.groupby: - clause = [inputmap[var.name] for var in self.select.groupby] - grouped = set(var.name for var in self.select.groupby) - sql.append('GROUP BY %s' % ', '.join(clause)) - else: - grouped = None - if self.select.having: - clause = [term.accept(self) for term in self.select.having] - sql.append('HAVING %s' % ', '.join(clause)) - # get the orderby clause - if self.select.orderby: - clause = [] - for sortterm in self.select.orderby: - sqlterm = sortterm.term.accept(self) - if sortterm.asc: - clause.append(sqlterm) - else: - clause.append('%s DESC' % sqlterm) - if grouped is not None: - for vref in sortterm.iget_nodes(VariableRef): - if not vref.name in grouped: - sql[-1] += ', ' + self.inputmap[vref.name] - grouped.add(vref.name) - sql = dbhelper.sql_add_order_by(' '.join(sql), - clause, - None, False, - self.limit or self.offset) - else: - sql = ' '.join(sql) - clause = None - - sql = dbhelper.sql_add_limit_offset(sql, self.limit, self.offset, clause) - return sql - - def visit_function(self, function): - """generate SQL name for a function""" - try: - return self.children[0].outputmap[str(function)] - except KeyError: - return '%s(%s)' % (function.name, - ','.join(c.accept(self) for c in function.children)) - - def visit_variableref(self, variableref): - """get the sql name for a variable reference""" - try: - return self.inputmap[variableref.name] - except KeyError: # XXX duh? explain - return variableref.variable.name - - def visit_constant(self, constant): - """generate SQL name for a constant""" - assert constant.type == 'Int' - return str(constant.value) - - -class UnionStep(LimitOffsetMixIn, Step): - """union results of child in-memory steps (e.g. OneFetchStep / AggrStep)""" - - def execute(self): - """execute this step""" - result = [] - limit = olimit = self.limit - offset = self.offset - assert offset != 0 - if offset is not None: - limit = limit + offset - for step in self.children: - if limit is not None: - if offset is None: - limit = olimit - len(result) - step.set_limit_offset(limit, None) - result_ = step.execute() - if offset is not None: - offset, result_ = offset_result(offset, result_) - result += result_ - if limit is not None: - if len(result) >= olimit: - return result[:olimit] - return result - - def mytest_repr(self): - """return a representation of this step suitable for test""" - return (self.__class__.__name__, self.limit, self.offset) - - -class IntersectStep(UnionStep): - """return intersection of results of child in-memory steps (e.g. OneFetchStep / AggrStep)""" - - def execute(self): - """execute this step""" - result = set() - for step in self.children: - result &= frozenset(step.execute()) - result = list(result) - if self.offset: - result = result[self.offset:] - if self.limit: - result = result[:self.limit] - return result - - -class UnionFetchStep(Step): - """union results of child steps using temporary tables (e.g. 
FetchStep)""" - - def execute(self): - """execute this step""" - self.execute_children() - - -__all__ = ('FetchStep', 'AggrStep', 'UnionStep', 'UnionFetchStep', 'IntersectStep') diff -r 84738d495ffd -r 793377697c81 server/pool.py --- a/server/pool.py Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,160 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""CubicWeb server connections set : the repository has a limited number of -:class:`ConnectionsSet` (defined in configuration, default to 4). Each of them -hold a connection for each source used by the repository. -""" - -__docformat__ = "restructuredtext en" - -import sys - -class ConnectionsSet(object): - """handle connections on a set of sources, at some point associated to a - :class:`Session` - """ - - def __init__(self, sources): - # dictionary of (source, connection), indexed by sources'uri - self.source_cnxs = {} - for source in sources: - self.add_source(source) - if not 'system' in self.source_cnxs: - self.source_cnxs['system'] = self.source_cnxs[sources[0].uri] - self._cursors = {} - - def __getitem__(self, uri): - """subscription notation provide access to sources'cursors""" - try: - cursor = self._cursors[uri] - except KeyError: - cursor = self.source_cnxs[uri][1].cursor() - if cursor is not None: - # None possible on sources without cursor support such as ldap - self._cursors[uri] = cursor - return cursor - - def add_source(self, source): - assert not source.uri in self.source_cnxs - self.source_cnxs[source.uri] = (source, source.get_connection()) - - def remove_source(self, source): - source, cnx = self.source_cnxs.pop(source.uri) - cnx.close() - self._cursors.pop(source.uri, None) - - def commit(self): - """commit the current transaction for this user""" - # FIXME: what happends if a commit fail - # would need a two phases commit or like, but I don't know how to do - # this using the db-api... - for source, cnx in self.source_cnxs.itervalues(): - # let exception propagates - cnx.commit() - - def rollback(self): - """rollback the current transaction for this user""" - for source, cnx in self.source_cnxs.itervalues(): - # catch exceptions, rollback other sources anyway - try: - cnx.rollback() - except Exception: - source.critical('rollback error', exc_info=sys.exc_info()) - # error on rollback, the connection is much probably in a really - # bad state. Replace it by a new one. 
- self.reconnect(source) - - def close(self, i_know_what_i_do=False): - """close all connections in the set""" - if i_know_what_i_do is not True: # unexpected closing safety belt - raise RuntimeError('connections set shouldn\'t be closed') - for cu in self._cursors.itervalues(): - try: - cu.close() - except Exception: - continue - for _, cnx in self.source_cnxs.itervalues(): - try: - cnx.close() - except Exception: - continue - - # internals ############################################################### - - def cnxset_set(self): - """connections set is being set on a session""" - self.check_connections() - - def cnxset_freed(self): - """connections set is being freed from a session""" - for source, cnx in self.source_cnxs.itervalues(): - source.cnxset_freed(cnx) - - def sources(self): - """return the source objects handled by this connections set""" - # implementation details of flying insert requires the system source - # first - yield self.source_cnxs['system'][0] - for uri, (source, cnx) in self.source_cnxs.items(): - if uri == 'system': - continue - yield source - #return [source_cnx[0] for source_cnx in self.source_cnxs.itervalues()] - - def source(self, uid): - """return the source object with the given uri""" - return self.source_cnxs[uid][0] - - def connection(self, uid): - """return the connection on the source object with the given uri""" - return self.source_cnxs[uid][1] - - def reconnect(self, source=None): - """reopen a connection for this source or all sources if none specified - """ - if source is None: - sources = self.sources() - else: - sources = (source,) - for source in sources: - try: - # properly close existing connection if any - self.source_cnxs[source.uri][1].close() - except Exception: - pass - source.info('trying to reconnect') - self.source_cnxs[source.uri] = (source, source.get_connection()) - self._cursors.pop(source.uri, None) - - def check_connections(self): - for source, cnx in self.source_cnxs.itervalues(): - newcnx = source.check_connection(cnx) - if newcnx is not None: - self.reset_connection(source, newcnx) - - def reset_connection(self, source, cnx): - self.source_cnxs[source.uri] = (source, cnx) - self._cursors.pop(source.uri, None) - - -from cubicweb.server.hook import Operation, LateOperation, SingleLastOperation -from logilab.common.deprecation import class_moved, class_renamed -Operation = class_moved(Operation) -PreCommitOperation = class_renamed('PreCommitOperation', Operation) -LateOperation = class_moved(LateOperation) -SingleLastOperation = class_moved(SingleLastOperation) diff -r 84738d495ffd -r 793377697c81 server/querier.py --- a/server/querier.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/querier.py Wed Sep 24 18:04:30 2014 +0200 @@ -35,7 +35,7 @@ from cubicweb.server.rqlannotation import SQLGenAnnotator, set_qdata from cubicweb.server.ssplanner import READ_ONLY_RTYPES, add_types_restriction from cubicweb.server.edition import EditedEntity - +from cubicweb.server.ssplanner import SSPlanner ETYPE_PYOBJ_MAP[Binary] = 'Bytes' @@ -63,16 +63,16 @@ if etype == 'Password': raise Unauthorized('Password selection is not allowed (%s)' % var) -def term_etype(session, term, solution, args): +def term_etype(cnx, term, solution, args): """return the entity type for the given term (a VariableRef or a Constant node) """ try: return solution[term.name] except AttributeError: - return session.describe(term.eval(args))[0] + return cnx.entity_metas(term.eval(args))['type'] -def check_read_access(session, rqlst, solution, args): +def check_read_access(cnx, 
rqlst, solution, args): """Check that the given user has credentials to access data read by the query and return a dict defining necessary "local checks" (i.e. rql expression in read permission defined in the schema) where no group grants @@ -85,7 +85,7 @@ # when used as an external source by another repository. # XXX what about local read security w/ those rewritten constants... DBG = (server.DEBUG & server.DBG_SEC) and 'read' in server._SECURITY_CAPS - schema = session.repo.schema + schema = cnx.repo.schema if rqlst.where is not None: for rel in rqlst.where.iget_nodes(Relation): # XXX has_text may have specific perm ? @@ -93,37 +93,37 @@ continue rschema = schema.rschema(rel.r_type) if rschema.final: - eschema = schema.eschema(term_etype(session, rel.children[0], + eschema = schema.eschema(term_etype(cnx, rel.children[0], solution, args)) rdef = eschema.rdef(rschema) else: - rdef = rschema.rdef(term_etype(session, rel.children[0], + rdef = rschema.rdef(term_etype(cnx, rel.children[0], solution, args), - term_etype(session, rel.children[1].children[0], + term_etype(cnx, rel.children[1].children[0], solution, args)) - if not session.user.matching_groups(rdef.get_groups('read')): + if not cnx.user.matching_groups(rdef.get_groups('read')): if DBG: print ('check_read_access: %s %s does not match %s' % - (rdef, session.user.groups, rdef.get_groups('read'))) + (rdef, cnx.user.groups, rdef.get_groups('read'))) # XXX rqlexpr not allowed raise Unauthorized('read', rel.r_type) if DBG: print ('check_read_access: %s %s matches %s' % - (rdef, session.user.groups, rdef.get_groups('read'))) + (rdef, cnx.user.groups, rdef.get_groups('read'))) localchecks = {} # iterate on defined_vars and not on solutions to ignore column aliases for varname in rqlst.defined_vars: eschema = schema.eschema(solution[varname]) if eschema.final: continue - if not session.user.matching_groups(eschema.get_groups('read')): + if not cnx.user.matching_groups(eschema.get_groups('read')): erqlexprs = eschema.get_rqlexprs('read') if not erqlexprs: ex = Unauthorized('read', solution[varname]) ex.var = varname if DBG: print ('check_read_access: %s %s %s %s' % - (varname, eschema, session.user.groups, eschema.get_groups('read'))) + (varname, eschema, cnx.user.groups, eschema.get_groups('read'))) raise ex # don't insert security on variable only referenced by 'NOT X relation Y' or # 'NOT EXISTS(X relation Y)' @@ -143,23 +143,21 @@ class ExecutionPlan(object): """the execution model of a rql query, composed of querier steps""" - def __init__(self, querier, rqlst, args, session): + def __init__(self, querier, rqlst, args, cnx): # original rql syntax tree self.rqlst = rqlst self.args = args or {} - # session executing the query - self.session = session + # cnx executing the query + self.cnx = cnx # quick reference to the system source - self.syssource = session.cnxset.source('system') + self.syssource = cnx.repo.system_source # execution steps self.steps = [] - # index of temporary tables created during execution - self.temp_tables = {} # various resource accesors self.querier = querier self.schema = querier.schema self.sqlannotate = querier.sqlgen_annotate - self.rqlhelper = session.vreg.rqlhelper + self.rqlhelper = cnx.vreg.rqlhelper def annotate_rqlst(self): if not self.rqlst.annotated: @@ -169,49 +167,15 @@ """add a step to the plan""" self.steps.append(step) - def clean(self): - """remove temporary tables""" - self.syssource.clean_temp_data(self.session, self.temp_tables) - def sqlexec(self, sql, args=None): - return 
self.syssource.sqlexec(self.session, sql, args) + return self.syssource.sqlexec(self.cnx, sql, args) def execute(self): """execute a plan and return resulting rows""" - try: - for step in self.steps: - result = step.execute() - # the latest executed step contains the full query result - return result - finally: - self.clean() - - def make_temp_table_name(self, table): - """ - return a temp table name according to db backend - """ - return self.syssource.make_temp_table_name(table) - - - def init_temp_table(self, table, selected, sol): - """initialize sql schema and variable map for a temporary table which - will be used to store result for the given rqlst - """ - try: - outputmap, sqlschema, _ = self.temp_tables[table] - update_varmap(outputmap, selected, table) - except KeyError: - sqlschema, outputmap = self.syssource.temp_table_def(selected, sol, - table) - self.temp_tables[table] = [outputmap, sqlschema, False] - return outputmap - - def create_temp_table(self, table): - """create a temporary table to store result for the given rqlst""" - if not self.temp_tables[table][-1]: - sqlschema = self.temp_tables[table][1] - self.syssource.create_temp_table(self.session, table, sqlschema) - self.temp_tables[table][-1] = True + for step in self.steps: + result = step.execute() + # the latest executed step contains the full query result + return result def preprocess(self, union, security=True): """insert security when necessary then annotate rql st for sql generation @@ -219,15 +183,15 @@ return rqlst to actually execute """ cached = None - if security and self.session.read_security: + if security and self.cnx.read_security: # ensure security is turned of when security is inserted, # else we may loop for ever... - if self.session.transaction_data.get('security-rqlst-cache'): + if self.cnx.transaction_data.get('security-rqlst-cache'): key = self.cache_key else: key = None - if key is not None and key in self.session.transaction_data: - cachedunion, args = self.session.transaction_data[key] + if key is not None and key in self.cnx.transaction_data: + cachedunion, args = self.cnx.transaction_data[key] union.children[:] = [] for select in cachedunion.children: union.append(select) @@ -236,10 +200,10 @@ self.args = args cached = True else: - with self.session.security_enabled(read=False): + with self.cnx.security_enabled(read=False): noinvariant = self._insert_security(union) if key is not None: - self.session.transaction_data[key] = (union, self.args) + self.cnx.transaction_data[key] = (union, self.args) else: noinvariant = () if cached is None: @@ -256,7 +220,7 @@ self._insert_security(subquery.query) localchecks, restricted = self._check_permissions(select) if any(localchecks): - self.session.rql_rewriter.insert_local_checks( + self.cnx.rql_rewriter.insert_local_checks( select, self.args, localchecks, restricted, noinvariant) return noinvariant @@ -278,12 +242,12 @@ Note rqlst should not have been simplified at this point. 
""" - session = self.session + cnx = self.cnx msgs = [] # dict(varname: eid), allowing to check rql expression for variables # which have a known eid varkwargs = {} - if not session.transaction_data.get('security-rqlst-cache'): + if not cnx.transaction_data.get('security-rqlst-cache'): for var in rqlst.defined_vars.itervalues(): if var.stinfo['constnode'] is not None: eid = var.stinfo['constnode'].eval(self.args) @@ -294,10 +258,10 @@ newsolutions = [] for solution in rqlst.solutions: try: - localcheck = check_read_access(session, rqlst, solution, self.args) + localcheck = check_read_access(cnx, rqlst, solution, self.args) except Unauthorized as ex: msg = 'remove %s from solutions since %s has no %s access to %s' - msg %= (solution, session.user.login, ex.args[0], ex.args[1]) + msg %= (solution, cnx.user.login, ex.args[0], ex.args[1]) msgs.append(msg) LOGGER.info(msg) else: @@ -312,10 +276,10 @@ # if entity has been added in the current transaction, the # user can read it whatever rql expressions are associated # to its type - if session.added_in_transaction(eid): + if cnx.added_in_transaction(eid): continue for rqlexpr in rqlexprs: - if rqlexpr.check(session, eid): + if rqlexpr.check(cnx, eid): break else: raise Unauthorized('No read acces on %r with eid %i.' % (var, eid)) @@ -351,8 +315,8 @@ """an execution model specific to the INSERT rql query """ - def __init__(self, querier, rqlst, args, session): - ExecutionPlan.__init__(self, querier, rqlst, args, session) + def __init__(self, querier, rqlst, args, cnx): + ExecutionPlan.__init__(self, querier, rqlst, args, cnx) # save originaly selected variable, we may modify this # dictionary for substitution (query parameters) self.selected = rqlst.selection @@ -450,17 +414,17 @@ if there is two entities matching U, the result set will look like [(eidX1, eidY1), (eidX2, eidY2)] """ - session = self.session - repo = session.repo + cnx = self.cnx + repo = cnx.repo results = [] for row in self.e_defs: - results.append([repo.glob_add_entity(session, edef) + results.append([repo.glob_add_entity(cnx, edef) for edef in row]) return results def insert_relation_defs(self): - session = self.session - repo = session.repo + cnx = self.cnx + repo = cnx.repo edited_entities = {} relations = {} for subj, rtype, obj in self.relation_defs(): @@ -475,7 +439,7 @@ obj = obj.entity.eid if repo.schema.rschema(rtype).inlined: if subj not in edited_entities: - entity = session.entity_from_eid(subj) + entity = cnx.entity_from_eid(subj) edited = EditedEntity(entity) edited_entities[subj] = edited else: @@ -486,9 +450,9 @@ relations[rtype].append((subj, obj)) else: relations[rtype] = [(subj, obj)] - repo.glob_add_relations(session, relations) + repo.glob_add_relations(cnx, relations) for edited in edited_entities.itervalues(): - repo.glob_update_entity(session, edited) + repo.glob_update_entity(cnx, edited) class QuerierHelper(object): @@ -515,27 +479,14 @@ self.solutions = repo.vreg.solutions rqlhelper = repo.vreg.rqlhelper # set backend on the rql helper, will be used for function checking - rqlhelper.backend = repo.config.sources()['system']['db-driver'] + rqlhelper.backend = repo.config.system_source_config['db-driver'] self._parse = rqlhelper.parse self._annotate = rqlhelper.annotate # rql planner - if len(repo.sources) < 2: - from cubicweb.server.ssplanner import SSPlanner - self._planner = SSPlanner(schema, rqlhelper) - else: - from cubicweb.server.msplanner import MSPlanner - self._planner = MSPlanner(schema, rqlhelper) + self._planner = SSPlanner(schema, 
rqlhelper) # sql generation annotator self.sqlgen_annotate = SQLGenAnnotator(schema).annotate - def set_planner(self): - if len(self._repo.sources) < 2: - from cubicweb.server.ssplanner import SSPlanner - self._planner = SSPlanner(self.schema, self._repo.vreg.rqlhelper) - else: - from cubicweb.server.msplanner import MSPlanner - self._planner = MSPlanner(self.schema, self._repo.vreg.rqlhelper) - def parse(self, rql, annotate=False): """return a rql syntax tree for the given rql""" try: @@ -543,13 +494,13 @@ except UnicodeError: raise RQLSyntaxError(rql) - def plan_factory(self, rqlst, args, session): + def plan_factory(self, rqlst, args, cnx): """create an execution plan for an INSERT RQL query""" if rqlst.TYPE == 'insert': - return InsertPlan(self, rqlst, args, session) - return ExecutionPlan(self, rqlst, args, session) + return InsertPlan(self, rqlst, args, cnx) + return ExecutionPlan(self, rqlst, args, cnx) - def execute(self, session, rql, args=None, build_descr=True): + def execute(self, cnx, rql, args=None, build_descr=True): """execute a rql query, return resulting rows and their description in a `ResultSet` object @@ -583,7 +534,7 @@ # if there are some, we need a better cache key, eg (rql + # entity type of each eid) try: - cachekey = self._repo.querier_cache_key(session, rql, + cachekey = self._repo.querier_cache_key(cnx, rql, args, eidkeys) except UnknownEid: # we want queries such as "Any X WHERE X eid 9999" @@ -599,7 +550,7 @@ # which are eids. Notice that if you may not need `eidkeys`, we # have to compute solutions anyway (kept as annotation on the # tree) - eidkeys = self.solutions(session, rqlst, args) + eidkeys = self.solutions(cnx, rqlst, args) except UnknownEid: # we want queries such as "Any X WHERE X eid 9999" return an # empty result instead of raising UnknownEid @@ -607,19 +558,19 @@ if args and rql not in self._rql_ck_cache: self._rql_ck_cache[rql] = eidkeys if eidkeys: - cachekey = self._repo.querier_cache_key(session, rql, args, + cachekey = self._repo.querier_cache_key(cnx, rql, args, eidkeys) self._rql_cache[cachekey] = rqlst orig_rqlst = rqlst if rqlst.TYPE != 'select': - if session.read_security: + if cnx.read_security: check_no_password_selected(rqlst) - # write query, ensure session's mode is 'write' so connections won't - # be released until commit/rollback - session.mode = 'write' + # write query, ensure connection's mode is 'write' so connections + # won't be released until commit/rollback + cnx.mode = 'write' cachekey = None else: - if session.read_security: + if cnx.read_security: for select in rqlst.children: check_no_password_selected(select) # on select query, always copy the cached rqlst so we don't have to @@ -633,7 +584,7 @@ cachekey += tuple(sorted([k for k, v in args.iteritems() if v is None])) # make an execution plan - plan = self.plan_factory(rqlst, args, session) + plan = self.plan_factory(rqlst, args, cnx) plan.cache_key = cachekey self._planner.build_plan(plan) # execute the plan @@ -645,11 +596,11 @@ # # notes: # * we should not reset the connections set here, since we don't want the - # session to loose it during processing + # connection to lose it during processing # * don't rollback if we're in the commit process, will be handled - # by the session - if session.commit_state is None: - session.commit_state = 'uncommitable' + # by the connection + if cnx.commit_state is None: + cnx.commit_state = 'uncommitable' raise # build a description for the results if necessary descr = () @@ -664,14 +615,14 @@ descr = RepeatList(len(results), 
tuple(description)) else: # hard, delegate the work :o) - descr = manual_build_descr(session, rqlst, args, results) + descr = manual_build_descr(cnx, rqlst, args, results) elif rqlst.TYPE == 'insert': # on insert plan, some entities may have been auto-casted, # so compute description manually even if there is only # one solution basedescr = [None] * len(plan.selected) todetermine = zip(xrange(len(plan.selected)), repeat(False)) - descr = _build_descr(session, results, basedescr, todetermine) + descr = _build_descr(cnx, results, basedescr, todetermine) # FIXME: get number of affected entities / relations on non # selection queries ? # return a result set object @@ -687,7 +638,7 @@ set_log_methods(QuerierHelper, LOGGER) -def manual_build_descr(tx, rqlst, args, result): +def manual_build_descr(cnx, rqlst, args, result): """build a description for a given result by analysing each row XXX could probably be done more efficiently during execution of query @@ -711,11 +662,11 @@ basedescr.append(ttype) if not todetermine: return RepeatList(len(result), tuple(basedescr)) - return _build_descr(tx, result, basedescr, todetermine) + return _build_descr(cnx, result, basedescr, todetermine) -def _build_descr(tx, result, basedescription, todetermine): +def _build_descr(cnx, result, basedescription, todetermine): description = [] - etype_from_eid = tx.describe + entity_metas = cnx.entity_metas todel = [] for i, row in enumerate(result): row_descr = basedescription[:] @@ -729,9 +680,9 @@ row_descr[index] = etype_from_pyobj(value) else: try: - row_descr[index] = etype_from_eid(value)[0] + row_descr[index] = entity_metas(value)['type'] except UnknownEid: - tx.error('wrong eid %s in repository, you should ' + cnx.error('wrong eid %s in repository, you should ' 'db-check the database' % value) todel.append(i) break diff -r 84738d495ffd -r 793377697c81 server/repository.py --- a/server/repository.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/repository.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
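The querier hunks above swap the old positional describe() lookup for the new entity_metas() dictionary. A minimal sketch of the change in calling convention, assuming `cnx` is an open connection and `eid` an existing entity id (both names are illustrative, not part of the patch):

    # up to 3.18, helpers read the entity type from a positional tuple:
    #     etype = session.describe(eid)[0]
    # from 3.19 on, the same information is looked up by key:
    etype = cnx.entity_metas(eid)['type']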
@@ -33,26 +33,23 @@ import Queue from warnings import warn from itertools import chain -from os.path import join -from datetime import datetime from time import time, localtime, strftime +from contextlib import contextmanager from warnings import warn from logilab.common.decorators import cached, clear_cache -from logilab.common import flatten +from logilab.common.deprecation import deprecated from yams import BadSchemaDefinition -from yams.schema import role_name from rql import RQLSyntaxError from rql.utils import rqlvar_maker -from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, QueryError, +from cubicweb import (CW_MIGRATION_MAP, QueryError, UnknownEid, AuthenticationError, ExecutionError, - ETypeNotSupportedBySources, MultiSourcesError, BadConnectionId, Unauthorized, ValidationError, - RepositoryError, UniqueTogetherError, onevent) + UniqueTogetherError, onevent) from cubicweb import cwvreg, schema, server -from cubicweb.server import ShuttingDown, utils, hook, pool, querier, sources +from cubicweb.server import ShuttingDown, utils, hook, querier, sources from cubicweb.server.session import Session, InternalSession, InternalManager from cubicweb.server.ssplanner import EditedEntity @@ -186,14 +183,13 @@ self.shutting_down = False # sources (additional sources info in the system database) self.system_source = self.get_source('native', 'system', - config.sources()['system'].copy()) - self.sources = [self.system_source] + config.system_source_config.copy()) self.sources_by_uri = {'system': self.system_source} # querier helper, need to be created after sources initialization self.querier = querier.QuerierHelper(self, self.schema) - # cache eid -> (type, physical source, extid, actual source) + # cache eid -> (type, extid, actual source) self._type_source_cache = {} - # cache (extid, source uri) -> eid + # cache extid -> eid self._extid_cache = {} # open some connection sets if config.init_cnxset_pool: @@ -217,7 +213,7 @@ self._cnxsets_pool = Queue.Queue() # 0. init a cnxset that will be used to fetch bootstrap information from # the database - self._cnxsets_pool.put_nowait(pool.ConnectionsSet(self.sources)) + self._cnxsets_pool.put_nowait(self.system_source.wrapped_connection()) # 1. 
set used cubes if config.creating or not config.read_instance_schema: config.bootstrap_cubes() @@ -248,8 +244,7 @@ if config.creating: # call init_creating so that for instance native source can # configurate tsearch according to postgres version - for source in self.sources: - source.init_creating() + self.system_source.init_creating() else: self.init_sources_from_database() if 'CWProperty' in self.schema: @@ -259,7 +254,7 @@ self._get_cnxset().close(True) self.cnxsets = [] # list of available cnxsets (can't iterate on a Queue) for i in xrange(config['connections-pool-size']): - self.cnxsets.append(pool.ConnectionsSet(self.sources)) + self.cnxsets.append(self.system_source.wrapped_connection()) self._cnxsets_pool.put_nowait(self.cnxsets[-1]) # internals ############################################################### @@ -270,9 +265,9 @@ or not 'CWSource' in self.schema: # # 3.10 migration self.system_source.init_creating() return - with self.internal_session() as session: + with self.internal_cnx() as cnx: # FIXME: sources should be ordered (add_entity priority) - for sourceent in session.execute( + for sourceent in cnx.execute( 'Any S, SN, SA, SC WHERE S is_instance_of CWSource, ' 'S name SN, S type SA, S config SC').entities(): if sourceent.name == 'system': @@ -280,16 +275,20 @@ self.sources_by_eid[sourceent.eid] = self.system_source self.system_source.init(True, sourceent) continue - self.add_source(sourceent, add_to_cnxsets=False) + self.add_source(sourceent) def _clear_planning_caches(self): - for cache in ('source_defs', 'is_multi_sources_relation', - 'can_cross_relation', 'rel_type_sources'): - clear_cache(self, cache) + clear_cache(self, 'source_defs') - def add_source(self, sourceent, add_to_cnxsets=True): - source = self.get_source(sourceent.type, sourceent.name, - sourceent.host_config, sourceent.eid) + def add_source(self, sourceent): + try: + source = self.get_source(sourceent.type, sourceent.name, + sourceent.host_config, sourceent.eid) + except RuntimeError: + if self.config.repairing: + self.exception("can't setup source %s, skipped", sourceent.name) + return + raise self.sources_by_eid[sourceent.eid] = source self.sources_by_uri[sourceent.name] = source if self.config.source_enabled(source): @@ -298,14 +297,6 @@ # internal session, which is not possible until connections sets have been # initialized) source.init(True, sourceent) - if not source.copy_based_source: - warn('[3.18] old multi-source system will go away in the next version', - DeprecationWarning) - self.sources.append(source) - self.querier.set_planner() - if add_to_cnxsets: - for cnxset in self.cnxsets: - cnxset.add_source(source) else: source.init(False, sourceent) self._clear_planning_caches() @@ -313,11 +304,6 @@ def remove_source(self, uri): source = self.sources_by_uri.pop(uri) del self.sources_by_eid[source.eid] - if self.config.source_enabled(source) and not source.copy_based_source: - self.sources.remove(source) - self.querier.set_planner() - for cnxset in self.cnxsets: - cnxset.remove_source(source) self._clear_planning_caches() def get_source(self, type, uri, source_config, eid=None): @@ -335,8 +321,6 @@ else: self.vreg._set_schema(schema) self.querier.set_schema(schema) - # don't use self.sources, we may want to give schema even to disabled - # sources for source in self.sources_by_uri.itervalues(): source.set_schema(schema) self.schema = schema @@ -346,9 +330,9 @@ from cubicweb.server.schemaserial import deserialize_schema appschema = schema.CubicWebSchema(self.config.appid)
self.debug('deserializing db schema into %s %#x', appschema.name, id(appschema)) - with self.internal_session() as session: + with self.internal_cnx() as cnx: try: - deserialize_schema(appschema, session) + deserialize_schema(appschema, cnx) except BadSchemaDefinition: raise except Exception as ex: @@ -469,7 +453,7 @@ except ZeroDivisionError: pass - def check_auth_info(self, session, login, authinfo): + def check_auth_info(self, cnx, login, authinfo): """validate authentication, raise AuthenticationError on failure, return associated CWUser's eid on success. """ @@ -478,70 +462,55 @@ for source in self.sources_by_uri.itervalues(): if self.config.source_enabled(source) and source.support_entity('CWUser'): try: - return source.authenticate(session, login, **authinfo) + with cnx.ensure_cnx_set: + return source.authenticate(cnx, login, **authinfo) except AuthenticationError: continue else: raise AuthenticationError('authentication failed with all sources') - def authenticate_user(self, session, login, **authinfo): + def authenticate_user(self, cnx, login, **authinfo): """validate login / password, raise AuthenticationError on failure return associated CWUser instance on success """ - eid = self.check_auth_info(session, login, authinfo) - cwuser = self._build_user(session, eid) + eid = self.check_auth_info(cnx, login, authinfo) + cwuser = self._build_user(cnx, eid) if self.config.consider_user_state and \ not cwuser.cw_adapt_to('IWorkflowable').state in cwuser.AUTHENTICABLE_STATES: raise AuthenticationError('user is not in authenticable state') return cwuser - def _build_user(self, session, eid): + def _build_user(self, cnx, eid): """return a CWUser entity for user with the given eid""" - cls = self.vreg['etypes'].etype_class('CWUser') - st = cls.fetch_rqlst(session.user, ordermethod=None) - st.add_eid_restriction(st.get_variable('X'), 'x', 'Substitute') - rset = session.execute(st.as_string(), {'x': eid}) - assert len(rset) == 1, rset - cwuser = rset.get_entity(0, 0) - # pylint: disable=W0104 - # prefetch / cache cwuser's groups and properties. This is especially - # useful for internal sessions to avoid security insertions - cwuser.groups - cwuser.properties - return cwuser + with cnx.ensure_cnx_set: + cls = self.vreg['etypes'].etype_class('CWUser') + st = cls.fetch_rqlst(cnx.user, ordermethod=None) + st.add_eid_restriction(st.get_variable('X'), 'x', 'Substitute') + rset = cnx.execute(st.as_string(), {'x': eid}) + assert len(rset) == 1, rset + cwuser = rset.get_entity(0, 0) + # pylint: disable=W0104 + # prefetch / cache cwuser's groups and properties. This is especially + # useful for internal sessions to avoid security insertions + cwuser.groups + cwuser.properties + return cwuser # public (dbapi) interface ################################################ + @deprecated("[3.19] use _cw.call_service('repo_stats')") def stats(self): # XXX restrict to managers session? """Return a dictionary containing some statistics about the repository resources usage. This is a public method, not requiring a session id. 
+ + This method is deprecated in favor of using _cw.call_service('repo_stats') """ - results = {} - querier = self.querier - source = self.system_source - for size, maxsize, hits, misses, title in ( - (len(querier._rql_cache), self.config['rql-cache-size'], - querier.cache_hit, querier.cache_miss, 'rqlt_st'), - (len(source._cache), self.config['rql-cache-size'], - source.cache_hit, source.cache_miss, 'sql'), - ): - results['%s_cache_size' % title] = '%s / %s' % (size, maxsize) - results['%s_cache_hit' % title] = hits - results['%s_cache_miss' % title] = misses - results['%s_cache_hit_percent' % title] = (hits * 100) / (hits + misses) - results['type_source_cache_size'] = len(self._type_source_cache) - results['extid_cache_size'] = len(self._extid_cache) - results['sql_no_cache'] = self.system_source.no_cache - results['nb_open_sessions'] = len(self._sessions) - results['nb_active_threads'] = threading.activeCount() - looping_tasks = self._tasks_manager._looping_tasks - results['looping_tasks'] = ', '.join(str(t) for t in looping_tasks) - results['available_cnxsets'] = self._cnxsets_pool.qsize() - results['threads'] = ', '.join(sorted(str(t) for t in threading.enumerate())) - return results + with self.internal_cnx() as cnx: + return cnx.call_service('repo_stats') + @deprecated("[3.19] use _cw.call_service('repo_gc_stats')") def gc_stats(self, nmax=20): """Return a dictionary containing some statistics about the repository memory usage. @@ -551,33 +520,8 @@ nmax is the max number of (most) referenced object returned as the 'referenced' result """ - - from cubicweb._gcdebug import gc_info - from cubicweb.appobject import AppObject - from cubicweb.rset import ResultSet - from cubicweb.dbapi import Connection, Cursor - from cubicweb.web.request import CubicWebRequestBase - from rql.stmts import Union - - lookupclasses = (AppObject, - Union, ResultSet, - Connection, Cursor, - CubicWebRequestBase) - try: - from cubicweb.server.session import Session, InternalSession - lookupclasses += (InternalSession, Session) - except ImportError: - pass # no server part installed - - results = {} - counters, ocounters, garbage = gc_info(lookupclasses, - viewreferrersclasses=()) - values = sorted(counters.iteritems(), key=lambda x: x[1], reverse=True) - results['lookupclasses'] = values - values = sorted(ocounters.iteritems(), key=lambda x: x[1], reverse=True)[:nmax] - results['referenced'] = values - results['unreachable'] = len(garbage) - return results + with self.internal_cnx() as cnx: + return cnx.call_service('repo_gc_stats', nmax=nmax) def get_schema(self): """Return the instance schema. @@ -600,31 +544,17 @@ return cubes def get_option_value(self, option, foreid=None): - """Return the value for `option` in the configuration. If `foreid` is - specified, the actual repository to which this entity belongs is - derefenced and the option value retrieved from it. + """Return the value for `option` in the configuration. This is a public method, not requiring a session id. + + `foreid` argument is deprecated and now useless (as of 3.19). 
""" + if foreid is not None: + warn('[3.19] foreid argument is deprecated', DeprecationWarning, + stacklevel=2) # XXX we may want to check we don't give sensible information - # XXX the only cube using 'foreid', apycot, stop used this, we probably - # want to drop this argument - if foreid is None: - return self.config[option] - _, sourceuri, extid, _ = self.type_and_source_from_eid(foreid) - if sourceuri == 'system': - return self.config[option] - cnxset = self._get_cnxset() - try: - cnx = cnxset.connection(sourceuri) - # needed to check connection is valid and usable by the current - # thread - newcnx = self.sources_by_uri[sourceuri].check_connection(cnx) - if newcnx is not None: - cnx = newcnx - return cnx.get_option_value(option, extid) - finally: - self._free_cnxset(cnxset) + return self.config[option] @cached def get_versions(self, checkversions=False): @@ -635,8 +565,8 @@ """ from logilab.common.changelog import Version vcconf = {} - with self.internal_session() as session: - for pk, version in session.execute( + with self.internal_cnx() as cnx: + for pk, version in cnx.execute( 'Any K,V WHERE P is CWProperty, P value V, P pkey K, ' 'P pkey ~="system.version.%"', build_descr=False): cube = pk.split('.')[-1] @@ -674,49 +604,22 @@ This is a public method, not requiring a session id. """ - with self.internal_session() as session: - # don't use session.execute, we don't want rset.req set - return self.querier.execute(session, 'Any K,V WHERE P is CWProperty,' + with self.internal_cnx() as cnx: + # don't use cnx.execute, we don't want rset.req set + return self.querier.execute(cnx, 'Any K,V WHERE P is CWProperty,' 'P pkey K, P value V, NOT P for_user U', build_descr=False) - # XXX protect this method: anonymous should be allowed and registration - # plugged + @deprecated("[3.19] Use session.call_service('register_user') instead'") def register_user(self, login, password, email=None, **kwargs): """check a user with the given login exists, if not create it with the given password. This method is designed to be used for anonymous registration on public web site. 
""" - with self.internal_session() as session: - # for consistency, keep same error as unique check hook (although not required) - errmsg = session._('the value "%s" is already used, use another one') - if (session.execute('CWUser X WHERE X login %(login)s', {'login': login}, - build_descr=False) - or session.execute('CWUser X WHERE X use_email C, C address %(login)s', - {'login': login}, build_descr=False)): - qname = role_name('login', 'subject') - raise ValidationError(None, {qname: errmsg % login}) - # we have to create the user - user = self.vreg['etypes'].etype_class('CWUser')(session) - if isinstance(password, unicode): - # password should *always* be utf8 encoded - password = password.encode('UTF8') - kwargs['login'] = login - kwargs['upassword'] = password - self.glob_add_entity(session, EditedEntity(user, **kwargs)) - session.execute('SET X in_group G WHERE X eid %(x)s, G name "users"', - {'x': user.eid}) - if email or '@' in login: - d = {'login': login, 'email': email or login} - if session.execute('EmailAddress X WHERE X address %(email)s', d, - build_descr=False): - qname = role_name('address', 'subject') - raise ValidationError(None, {qname: errmsg % d['email']}) - session.execute('INSERT EmailAddress X: X address %(email)s, ' - 'U primary_email X, U use_email X ' - 'WHERE U login %(login)s', d, build_descr=False) - session.commit() - return True + with self.internal_cnx() as cnx: + cnx.call_service('register_user', login=login, password=password, + email=email, **kwargs) + cnx.commit() def find_users(self, fetch_attrs, **query_attrs): """yield user attributes for cwusers matching the given query_attrs @@ -749,16 +652,16 @@ return rset.rows def connect(self, login, **kwargs): - """open a connection for a given user + """open a session for a given user raise `AuthenticationError` if the authentication failed raise `ConnectionError` if we can't open a connection """ cnxprops = kwargs.pop('cnxprops', None) # use an internal connection - with self.internal_session() as session: + with self.internal_cnx() as cnx: # try to get a user object - user = self.authenticate_user(session, login, **kwargs) + user = self.authenticate_user(cnx, login, **kwargs) session = Session(user, self, cnxprops) if threading.currentThread() in self._pyro_sessions: # assume no pyro client does one get_repository followed by @@ -768,13 +671,14 @@ self._pyro_sessions[threading.currentThread()] = session user._cw = user.cw_rset.req = session user.cw_clear_relation_cache() - self._sessions[session.id] = session - self.info('opened session %s for user %s', session.id, login) - self.hm.call_hooks('session_open', session) - # commit session at this point in case write operation has been done - # during `session_open` hooks - session.commit() - return session.id + self._sessions[session.sessionid] = session + self.info('opened session %s for user %s', session.sessionid, login) + with session.new_cnx() as cnx: + self.hm.call_hooks('session_open', cnx) + # commit connection at this point in case write operation has been + # done during `session_open` hooks + cnx.commit() + return session.sessionid def execute(self, sessionid, rqlstring, args=None, build_descr=True, txid=None): @@ -804,13 +708,35 @@ finally: session.free_cnxset() + @deprecated('[3.19] use .entity_metas(sessionid, eid, txid) instead') def describe(self, sessionid, eid, txid=None): """return a tuple `(type, physical source uri, extid, actual source uri)` for the entity of the given `eid` + + As of 3.19, physical source uri is always the system 
source. """ session = self._get_session(sessionid, setcnxset=True, txid=txid) try: - return self.type_and_source_from_eid(eid, session) + etype, extid, source = self.type_and_source_from_eid(eid, session) + return etype, source, extid, source finally: session.free_cnxset() + + def entity_metas(self, sessionid, eid, txid=None): + """return a dictionary containing metadata for the entity of the given + `eid`. Available keys are: + + * 'type', the entity's type name, + + * 'source', the name of the source this entity comes from, + + * 'extid', the identifier for this entity in its originating source, as + an encoded string or `None` for entities from the 'system' source. + """ + session = self._get_session(sessionid, setcnxset=True, txid=txid) + try: + etype, extid, source = self.type_and_source_from_eid(eid, session) + return {'type': etype, 'source': source, 'extid': extid} finally: session.free_cnxset()
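The deprecated describe() call and its entity_metas() replacement relate as sketched below (`repo`, `sessionid` and `eid` are assumed to already exist; the sketch is an illustration, not part of the patch):

    metas = repo.entity_metas(sessionid, eid)
    metas['type']    # the entity's type name
    metas['source']  # name of the entity's originating source
    metas['extid']   # None for entities from the 'system' source
    # deprecated 4-tuple equivalent; both source slots now carry the same
    # value, since the physical source is always the system source:
    # etype, source, extid, source = repo.describe(sessionid, eid)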
@@ -820,6 +746,7 @@ """ return self._get_session(sessionid, setcnxset=False).timestamp + @deprecated('[3.19] use session or transaction data') def get_shared_data(self, sessionid, key, default=None, pop=False, txdata=False): """return value associated to key in the session's data dictionary or session's transaction's data if `txdata` is true. @@ -832,6 +759,7 @@ session = self._get_session(sessionid, setcnxset=False) return session.get_shared_data(key, default, pop, txdata) + @deprecated('[3.19] use session or transaction data') def set_shared_data(self, sessionid, key, value, txdata=False): """set value associated to `key` in shared data @@ -847,7 +775,7 @@ self.debug('begin commit for session %s', sessionid) try: session = self._get_session(sessionid) - session.set_tx(txid) + session.set_cnx(txid) return session.commit() except (ValidationError, Unauthorized): raise @@ -860,7 +788,7 @@ self.debug('begin rollback for session %s', sessionid) try: session = self._get_session(sessionid) - session.set_tx(txid) + session.set_cnx(txid) session.rollback() except Exception: self.exception('unexpected error') @@ -868,47 +796,30 @@ def close(self, sessionid, txid=None, checkshuttingdown=True): """close the session with the given id""" - session = self._get_session(sessionid, setcnxset=True, txid=txid, + session = self._get_session(sessionid, txid=txid, checkshuttingdown=checkshuttingdown) # operation uncommited before close are rolled back before hook is called - session.rollback(free_cnxset=False) - self.hm.call_hooks('session_close', session) - # commit session at this point in case write operation has been done - # during `session_close` hooks - session.commit() + if session._cnx._session_handled: + session._cnx.rollback(free_cnxset=False) + with session.new_cnx() as cnx: + self.hm.call_hooks('session_close', cnx) + # commit connection at this point in case write operation has been + # done during `session_close` hooks + cnx.commit() session.close() if threading.currentThread() in self._pyro_sessions: self._pyro_sessions[threading.currentThread()] = None del self._sessions[sessionid] self.info('closed session %s for user %s', sessionid, session.user.login) - def call_service(self, sessionid, regid, async, **kwargs): + def call_service(self, sessionid, regid, **kwargs): """ See :class:`cubicweb.dbapi.Connection.call_service` and :class:`cubicweb.server.Service` """ + # XXX lack a txid session = self._get_session(sessionid) - return self._call_service_with_session(session, regid, async, **kwargs) - - def _call_service_with_session(self, session, regid, async, **kwargs): - if async: - self.info('calling service %s asynchronously', regid) - def task(): - session.set_cnxset() - try: - service = session.vreg['services'].select(regid, session, **kwargs) - return service.call(**kwargs) - finally: - session.rollback() # free cnxset - self.threaded_task(task) - else: - self.info('calling service %s synchronously', regid) - session.set_cnxset() - try: - service = session.vreg['services'].select(regid, session, **kwargs) - return service.call(**kwargs) - finally: - session.free_cnxset() + return session._cnx.call_service(regid, **kwargs) def user_info(self, sessionid, props=None): """this method should be used by client to: @@ -953,25 +864,6 @@ finally: session.free_cnxset() - # public (inter-repository) interface ##################################### - - def entities_modified_since(self, etypes, mtime): - """function designed to be called from an external repository which - is using this one as a rql source for synchronization, and return a - 3-uple containing : - * the local date - * list of (etype, eid) of entities of the given types which have been - modified since the given timestamp (actually entities whose full text - index content has changed) - * list of (etype, eid) of entities of the given types which have been - deleted since the given timestamp - """ - with self.internal_session() as session: - updatetime = datetime.utcnow() - modentities, delentities = self.system_source.modified_entities( - session, etypes, mtime) - return updatetime, modentities, delentities - # session handling ######################################################## def close_sessions(self): @@ -988,27 +880,49 @@ """ mintime = time() - self.cleanup_session_time self.debug('cleaning session unused since %s', - strftime('%T', localtime(mintime))) + strftime('%H:%M:%S', localtime(mintime))) nbclosed = 0 for session in self._sessions.values(): if session.timestamp < mintime: - self.close(session.id) + self.close(session.sessionid) nbclosed += 1 return nbclosed + @deprecated("[3.19] use internal_cnx now\n" + "(Beware that integrity hooks are now enabled by default)") def internal_session(self, cnxprops=None, safe=False): """return a dbapi like connection/cursor using internal user which have every rights on the repository. The `safe` argument is a boolean flag telling if integrity hooks should be activated or not. + /!\ the safe argument is False by default. + *YOU HAVE TO* commit/rollback or close (rollback implicitly) the session once the job's done, else you'll leak connections set up to the time where no one is available, causing irremediable freeze... """ - session = InternalSession(self, cnxprops, safe) + session = InternalSession(self, cnxprops) + if not safe: + session.disable_hook_categories('integrity') + session.disable_hook_categories('security') + session._cnx.ctx_count += 1 session.set_cnxset() return session + @contextmanager + def internal_cnx(self): + """Context manager returning a Connection using the internal user, which + has all access rights on the repository. + + Beware that unlike the older :meth:`internal_session`, internal + connections have all hooks besides security enabled. + """ + with InternalSession(self) as session: + with session.new_cnx() as cnx: + with cnx.security_enabled(read=False, write=False): + with cnx.ensure_cnx_set: + yield cnx + 
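A short usage sketch for the new context manager (`repo` stands for an already initialized Repository instance and the RQL string is only illustrative, not part of the patch):

    with repo.internal_cnx() as cnx:
        # unlike internal_session(safe=False), every hook category except
        # security is active inside this block
        rset = cnx.execute('Any X WHERE X is CWUser')
        cnx.commit()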
+ """ + with InternalSession(self) as session: + with session.new_cnx() as cnx: + with cnx.security_enabled(read=False, write=False): + with cnx.ensure_cnx_set: + yield cnx + def _get_session(self, sessionid, setcnxset=False, txid=None, checkshuttingdown=True): """return the session associated with the given session identifier""" @@ -1019,7 +933,7 @@ except KeyError: raise BadConnectionId('No such session %s' % sessionid) if setcnxset: - session.set_tx(txid) # must be done before set_cnxset + session.set_cnx(txid) # must be done before set_cnxset session.set_cnxset() return session @@ -1027,9 +941,9 @@ # * correspondance between eid and (type, source) # * correspondance between eid and local id (i.e. specific to a given source) - def type_and_source_from_eid(self, eid, session=None): - """return a tuple `(type, physical source uri, extid, actual source - uri)` for the entity of the given `eid` + def type_and_source_from_eid(self, eid, cnx): + """return a tuple `(type, extid, actual source uri)` for the entity of + the given `eid` """ try: eid = int(eid) @@ -1038,21 +952,9 @@ try: return self._type_source_cache[eid] except KeyError: - if session is None: - session = self.internal_session() - free_cnxset = True - else: - free_cnxset = False - try: - etype, uri, extid, auri = self.system_source.eid_type_source( - session, eid) - finally: - if free_cnxset: - session.free_cnxset() - self._type_source_cache[eid] = (etype, uri, extid, auri) - if uri != 'system': - self._extid_cache[(extid, uri)] = eid - return etype, uri, extid, auri + etype, extid, auri = self.system_source.eid_type_source(cnx, eid) + self._type_source_cache[eid] = (etype, extid, auri) + return etype, extid, auri def clear_caches(self, eids): etcache = self._type_source_cache @@ -1060,22 +962,17 @@ rqlcache = self.querier._rql_cache for eid in eids: try: - etype, uri, extid, auri = etcache.pop(int(eid)) # may be a string in some cases + etype, extid, auri = etcache.pop(int(eid)) # may be a string in some cases rqlcache.pop( ('%s X WHERE X eid %s' % (etype, eid),), None) - extidcache.pop((extid, uri), None) + extidcache.pop(extid, None) except KeyError: etype = None rqlcache.pop( ('Any X WHERE X eid %s' % eid,), None) - for source in self.sources: - source.clear_eid_cache(eid, etype) + self.system_source.clear_eid_cache(eid, etype) - def type_from_eid(self, eid, session=None): + def type_from_eid(self, eid, cnx): """return the type of the entity with id """ - return self.type_and_source_from_eid(eid, session)[0] - - def source_from_eid(self, eid, session=None): - """return the source for the given entity's eid""" - return self.sources_by_uri[self.type_and_source_from_eid(eid, session)[1]] + return self.type_and_source_from_eid(eid, cnx)[0] def querier_cache_key(self, session, rql, args, eidkeys): cachekey = [rql] @@ -1092,16 +989,8 @@ args[key] = int(args[key]) return tuple(cachekey) - def eid2extid(self, source, eid, session=None): - """get local id from an eid""" - etype, uri, extid, _ = self.type_and_source_from_eid(eid, session) - if source.uri != uri: - # eid not from the given source - raise UnknownEid(eid) - return extid - - def extid2eid(self, source, extid, etype, session=None, insert=True, - complete=True, commit=True, sourceparams=None): + def extid2eid(self, source, extid, etype, cnx, insert=True, + sourceparams=None): """Return eid from a local id. If the eid is a negative integer, that means the entity is known but has been copied back to the system source hence should be ignored. @@ -1124,101 +1013,80 @@ 6. 
unless source's :attr:`should_call_hooks` tell otherwise, 'before_add_entity' hooks are called """ - uri = 'system' if source.copy_based_source else source.uri - cachekey = (extid, uri) try: - return self._extid_cache[cachekey] + return self._extid_cache[extid] except KeyError: pass - free_cnxset = False - if session is None: - session = self.internal_session() - free_cnxset = True - eid = self.system_source.extid2eid(session, uri, extid) + try: + # bw compat: cnx may be a session, get at the Connection + cnx = cnx._cnx + except AttributeError: + pass + with cnx.ensure_cnx_set: + eid = self.system_source.extid2eid(cnx, extid) if eid is not None: - self._extid_cache[cachekey] = eid - self._type_source_cache[eid] = (etype, uri, extid, source.uri) - if free_cnxset: - session.free_cnxset() + self._extid_cache[extid] = eid + self._type_source_cache[eid] = (etype, extid, source.uri) return eid if not insert: return - # no link between extid and eid, create one using an internal session - # since the current session user may not have required permissions to - # do necessary stuff and we don't want to commit user session. - # - # Moreover, even if session is already an internal session but is - # processing a commit, we have to use another one - if not session.is_internal_session: - session = self.internal_session() - free_cnxset = True - try: - eid = self.system_source.create_eid(session) - self._extid_cache[cachekey] = eid - self._type_source_cache[eid] = (etype, uri, extid, source.uri) - entity = source.before_entity_insertion( - session, extid, etype, eid, sourceparams) - if source.should_call_hooks: - # get back a copy of operation for later restore if necessary, - # see below - pending_operations = session.pending_operations[:] - self.hm.call_hooks('before_add_entity', session, entity=entity) - self.add_info(session, entity, source, extid, complete=complete) - source.after_entity_insertion(session, extid, entity, sourceparams) - if source.should_call_hooks: - self.hm.call_hooks('after_add_entity', session, entity=entity) - if commit or free_cnxset: - session.commit(free_cnxset) - return eid - except Exception: - if commit or free_cnxset: - session.rollback(free_cnxset) - else: + # no link between extid and eid, create one + with cnx.ensure_cnx_set: + # write query, ensure connection's mode is 'write' so connections + # won't be released until commit/rollback + cnx.mode = 'write' + try: + eid = self.system_source.create_eid(cnx) + self._extid_cache[extid] = eid + self._type_source_cache[eid] = (etype, extid, source.uri) + entity = source.before_entity_insertion( + cnx, extid, etype, eid, sourceparams) + if source.should_call_hooks: + # get back a copy of operation for later restore if + # necessary, see below + pending_operations = cnx.pending_operations[:] + self.hm.call_hooks('before_add_entity', cnx, entity=entity) + self.add_info(cnx, entity, source, extid) + source.after_entity_insertion(cnx, extid, entity, sourceparams) + if source.should_call_hooks: + self.hm.call_hooks('after_add_entity', cnx, entity=entity) + return eid + except Exception: # XXX do some cleanup manually so that the transaction has a # chance to be commited, with simply this entity discarded - self._extid_cache.pop(cachekey, None) + self._extid_cache.pop(extid, None) self._type_source_cache.pop(eid, None) if 'entity' in locals(): - hook.CleanupDeletedEidsCacheOp.get_instance(session).add_data(entity.eid) - self.system_source.delete_info_multi(session, [entity], uri) + 
hook.CleanupDeletedEidsCacheOp.get_instance(cnx).add_data(entity.eid) + self.system_source.delete_info_multi(cnx, [entity]) if source.should_call_hooks: - session._tx.pending_operations = pending_operations - raise + cnx.pending_operations = pending_operations + raise - def add_info(self, session, entity, source, extid=None, complete=True): + def add_info(self, session, entity, source, extid=None): """add type and source info for an eid into the system table, and index the entity with the full text index """ # begin by inserting eid/type/source/extid into the entities table hook.CleanupNewEidsCacheOp.get_instance(session).add_data(entity.eid) - self.system_source.add_info(session, entity, source, extid, complete) + self.system_source.add_info(session, entity, source, extid) - def delete_info(self, session, entity, sourceuri, scleanup=None): + def delete_info(self, session, entity, sourceuri): """called by external source when some entity known by the system source has been deleted in the external source """ # mark eid as being deleted in session info and setup cache update # operation hook.CleanupDeletedEidsCacheOp.get_instance(session).add_data(entity.eid) - self._delete_info(session, entity, sourceuri, scleanup) + self._delete_info(session, entity, sourceuri) - def _delete_info(self, session, entity, sourceuri, scleanup=None): + def _delete_info(self, session, entity, sourceuri): """delete system information on deletion of an entity: * delete all remaining relations from/to this entity - - * call delete info on the system source which will transfer record from - the entities table to the deleted_entities table - - When scleanup is specified, it's expected to be the source's eid, in - which case we'll specify the target's relation source so that this - source is ignored. E.g. we want to delete relations stored locally, as - the deletion information comes from the external source, it's its - responsability to have cleaned-up its own relations. + * call delete info on the system source """ pendingrtypes = session.transaction_data.get('pendingrtypes', ()) - if scleanup is not None: - source = self.sources_by_eid[scleanup] # delete remaining relations: if user can delete the entity, he can # delete all its relations without security checking with session.security_enabled(read=False, write=False): @@ -1233,34 +1101,20 @@ rql = 'DELETE X %s Y WHERE X eid %%(x)s' % rtype else: rql = 'DELETE Y %s X WHERE X eid %%(x)s' % rtype - if scleanup is not None: - # if the relation can't be crossed, nothing to cleanup (we - # would get a BadRQLQuery from the multi-sources planner). - # This may still leave some junk if the mapping has changed - # at some point, but one can still run db-check to catch - # those - if not source in self.can_cross_relation(rtype): - continue - # source cleaning: only delete relations stored locally - # (here, scleanup - rql += ', NOT (Y cw_source S, S eid %(seid)s)' try: - session.execute(rql, {'x': eid, 'seid': scleanup}, - build_descr=False) + session.execute(rql, {'x': eid}, build_descr=False) except Exception: if self.config.mode == 'test': raise self.exception('error while cascading delete for entity %s ' 'from %s. 
RQL: %s', entity, sourceuri, rql) - self.system_source.delete_info_multi(session, [entity], sourceuri) + self.system_source.delete_info_multi(session, [entity]) - def _delete_info_multi(self, session, entities, sourceuri, scleanup=None): + def _delete_info_multi(self, session, entities): """same as _delete_info but accepts a list of entities with the same etype and belinging to the same source. """ pendingrtypes = session.transaction_data.get('pendingrtypes', ()) - if scleanup is not None: - source = self.sources_by_eid[scleanup] # delete remaining relations: if user can delete the entity, he can # delete all its relations without security checking with session.security_enabled(read=False, write=False): @@ -1275,77 +1129,36 @@ rql = 'DELETE X %s Y WHERE X eid IN (%s)' % (rtype, in_eids) else: rql = 'DELETE Y %s X WHERE X eid IN (%s)' % (rtype, in_eids) - if scleanup is not None: - # if the relation can't be crossed, nothing to cleanup (we - # would get a BadRQLQuery from the multi-sources planner). - # This may still leave some junk if the mapping has changed - # at some point, but one can still run db-check to catch - # those - if not source in self.can_cross_relation(rtype): - continue - # source cleaning: only delete relations stored locally - rql += ', NOT (Y cw_source S, S eid %(seid)s)' try: - session.execute(rql, {'seid': scleanup}, build_descr=False) + session.execute(rql, build_descr=False) except ValidationError: raise except Unauthorized: - self.exception('Unauthorized exception while cascading delete for entity %s ' - 'from %s. RQL: %s.\nThis should not happen since security is disabled here.', - entities, sourceuri, rql) + self.exception('Unauthorized exception while cascading delete for entity %s. ' + 'RQL: %s.\nThis should not happen since security is disabled here.', + entities, rql) raise except Exception: if self.config.mode == 'test': raise - self.exception('error while cascading delete for entity %s ' - 'from %s. RQL: %s', entities, sourceuri, rql) - self.system_source.delete_info_multi(session, entities, sourceuri) + self.exception('error while cascading delete for entity %s. RQL: %s', + entities, rql) + self.system_source.delete_info_multi(session, entities) - def locate_relation_source(self, session, subject, rtype, object): - subjsource = self.source_from_eid(subject, session) - objsource = self.source_from_eid(object, session) - if not subjsource is objsource: - source = self.system_source - if not (subjsource.may_cross_relation(rtype) - and objsource.may_cross_relation(rtype)): - raise MultiSourcesError( - "relation %s can't be crossed among sources" - % rtype) - elif not subjsource.support_relation(rtype): - source = self.system_source - else: - source = subjsource - if not source.support_relation(rtype, True): - raise MultiSourcesError( - "source %s doesn't support write of %s relation" - % (source.uri, rtype)) - return source - - def locate_etype_source(self, etype): - for source in self.sources: - if source.support_entity(etype, 1): - return source - else: - raise ETypeNotSupportedBySources(etype) - - def init_entity_caches(self, session, entity, source): - """add entity to session entities cache and repo's extid cache. + def init_entity_caches(self, cnx, entity, source): + """add entity to connection entities cache and repo's extid cache. Return entity's ext id if the source isn't the system source. 
""" - session.set_entity_cache(entity) - suri = source.uri - if suri == 'system': + cnx.set_entity_cache(entity) + if source.uri == 'system': extid = None else: - if source.copy_based_source: - suri = 'system' extid = source.get_extid(entity) - self._extid_cache[(str(extid), suri)] = entity.eid - self._type_source_cache[entity.eid] = (entity.cw_etype, suri, extid, - source.uri) + self._extid_cache[str(extid)] = entity.eid + self._type_source_cache[entity.eid] = (entity.cw_etype, extid, source.uri) return extid - def glob_add_entity(self, session, edited): + def glob_add_entity(self, cnx, edited): """add an entity to the repository the entity eid should originaly be None and a unique eid is assigned to @@ -1355,40 +1168,38 @@ entity._cw_is_saved = False # entity has an eid but is not yet saved # init edited_attributes before calling before_add_entity hooks entity.cw_edited = edited - source = self.locate_etype_source(entity.cw_etype) + source = self.system_source # allocate an eid to the entity before calling hooks - entity.eid = self.system_source.create_eid(session) + entity.eid = self.system_source.create_eid(cnx) # set caches asap - extid = self.init_entity_caches(session, entity, source) + extid = self.init_entity_caches(cnx, entity, source) if server.DEBUG & server.DBG_REPO: print 'ADD entity', self, entity.cw_etype, entity.eid, edited prefill_entity_caches(entity) - if source.should_call_hooks: - self.hm.call_hooks('before_add_entity', session, entity=entity) - relations = preprocess_inlined_relations(session, entity) + self.hm.call_hooks('before_add_entity', cnx, entity=entity) + relations = preprocess_inlined_relations(cnx, entity) edited.set_defaults() - if session.is_hook_category_activated('integrity'): + if cnx.is_hook_category_activated('integrity'): edited.check(creation=True) try: - source.add_entity(session, entity) + source.add_entity(cnx, entity) except UniqueTogetherError as exc: - userhdlr = session.vreg['adapters'].select( - 'IUserFriendlyError', session, entity=entity, exc=exc) + userhdlr = cnx.vreg['adapters'].select( + 'IUserFriendlyError', cnx, entity=entity, exc=exc) userhdlr.raise_user_exception() - self.add_info(session, entity, source, extid, complete=False) + self.add_info(cnx, entity, source, extid) edited.saved = entity._cw_is_saved = True # trigger after_add_entity after after_add_relation - if source.should_call_hooks: - self.hm.call_hooks('after_add_entity', session, entity=entity) - # call hooks for inlined relations - for attr, value in relations: - self.hm.call_hooks('before_add_relation', session, - eidfrom=entity.eid, rtype=attr, eidto=value) - self.hm.call_hooks('after_add_relation', session, - eidfrom=entity.eid, rtype=attr, eidto=value) + self.hm.call_hooks('after_add_entity', cnx, entity=entity) + # call hooks for inlined relations + for attr, value in relations: + self.hm.call_hooks('before_add_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=value) + self.hm.call_hooks('after_add_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=value) return entity.eid - def glob_update_entity(self, session, edited): + def glob_update_entity(self, cnx, edited): """replace an entity in the repository the type and the eid of an entity must not be changed """ @@ -1398,13 +1209,13 @@ entity.cw_attr_cache, edited hm = self.hm eschema = entity.e_schema - session.set_entity_cache(entity) + cnx.set_entity_cache(entity) orig_edited = getattr(entity, 'cw_edited', None) entity.cw_edited = edited + source = self.system_source try: only_inline_rels, 
need_fti_update = True, False relations = [] - source = self.source_from_eid(entity.eid, session) for attr in list(edited): if attr == 'eid': continue @@ -1420,117 +1231,101 @@ previous_value = previous_value[0][0] # got a result set if previous_value == entity.cw_attr_cache[attr]: previous_value = None - elif source.should_call_hooks: - hm.call_hooks('before_delete_relation', session, + else: + hm.call_hooks('before_delete_relation', cnx, eidfrom=entity.eid, rtype=attr, eidto=previous_value) relations.append((attr, edited[attr], previous_value)) - if source.should_call_hooks: - # call hooks for inlined relations - for attr, value, _t in relations: - hm.call_hooks('before_add_relation', session, - eidfrom=entity.eid, rtype=attr, eidto=value) - if not only_inline_rels: - hm.call_hooks('before_update_entity', session, entity=entity) - if session.is_hook_category_activated('integrity'): + # call hooks for inlined relations + for attr, value, _t in relations: + hm.call_hooks('before_add_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=value) + if not only_inline_rels: + hm.call_hooks('before_update_entity', cnx, entity=entity) + if cnx.is_hook_category_activated('integrity'): edited.check() try: - source.update_entity(session, entity) + source.update_entity(cnx, entity) edited.saved = True except UniqueTogetherError as exc: - userhdlr = session.vreg['adapters'].select( - 'IUserFriendlyError', session, entity=entity, exc=exc) + userhdlr = cnx.vreg['adapters'].select( + 'IUserFriendlyError', cnx, entity=entity, exc=exc) userhdlr.raise_user_exception() - self.system_source.update_info(session, entity, need_fti_update) - if source.should_call_hooks: - if not only_inline_rels: - hm.call_hooks('after_update_entity', session, entity=entity) - for attr, value, prevvalue in relations: - # if the relation is already cached, update existant cache - relcache = entity.cw_relation_cached(attr, 'subject') - if prevvalue is not None: - hm.call_hooks('after_delete_relation', session, - eidfrom=entity.eid, rtype=attr, eidto=prevvalue) - if relcache is not None: - session.update_rel_cache_del(entity.eid, attr, prevvalue) - del_existing_rel_if_needed(session, entity.eid, attr, value) - session.update_rel_cache_add(entity.eid, attr, value) - hm.call_hooks('after_add_relation', session, - eidfrom=entity.eid, rtype=attr, eidto=value) + self.system_source.update_info(cnx, entity, need_fti_update) + if not only_inline_rels: + hm.call_hooks('after_update_entity', cnx, entity=entity) + for attr, value, prevvalue in relations: + # if the relation is already cached, update existing cache + relcache = entity.cw_relation_cached(attr, 'subject') + if prevvalue is not None: + hm.call_hooks('after_delete_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=prevvalue) + if relcache is not None: + cnx.update_rel_cache_del(entity.eid, attr, prevvalue) + del_existing_rel_if_needed(cnx, entity.eid, attr, value) + cnx.update_rel_cache_add(entity.eid, attr, value) + hm.call_hooks('after_add_relation', cnx, + eidfrom=entity.eid, rtype=attr, eidto=value) finally: if orig_edited is not None: entity.cw_edited = orig_edited - def glob_delete_entities(self, session, eids): + def glob_delete_entities(self, cnx, eids): """delete a list of entities and all related entities from the repository""" - # mark eids as being deleted in session info and setup cache update + # mark eids as being deleted in cnx info and setup cache update # operation (register pending eids before actual deletion to avoid # multiple call to 
glob_delete_entities) - op = hook.CleanupDeletedEidsCacheOp.get_instance(session) + op = hook.CleanupDeletedEidsCacheOp.get_instance(cnx) if not isinstance(eids, (set, frozenset)): warn('[3.13] eids should be given as a set', DeprecationWarning, stacklevel=2) eids = frozenset(eids) eids = eids - op._container op._container |= eids - data_by_etype_source = {} # values are ([list of eids], - # [list of extid], - # [list of entities]) + data_by_etype = {} # values are [list of entities] # # WARNING: the way this dictionary is populated is heavily optimized # and does not use setdefault on purpose. Unless a new release # of the Python interpreter advertises large perf improvements # in setdefault, this should not be changed without profiling. - for eid in eids: - etype, sourceuri, extid, _ = self.type_and_source_from_eid(eid, session) + etype = self.type_from_eid(eid, cnx) # XXX should cache entity's cw_metainformation - entity = session.entity_from_eid(eid, etype) + entity = cnx.entity_from_eid(eid, etype) try: - data_by_etype_source[(etype, sourceuri)].append(entity) + data_by_etype[etype].append(entity) except KeyError: - data_by_etype_source[(etype, sourceuri)] = [entity] - for (etype, sourceuri), entities in data_by_etype_source.iteritems(): + data_by_etype[etype] = [entity] + source = self.system_source + for etype, entities in data_by_etype.iteritems(): if server.DEBUG & server.DBG_REPO: print 'DELETE entities', etype, [entity.eid for entity in entities] - source = self.sources_by_uri[sourceuri] - if source.should_call_hooks: - self.hm.call_hooks('before_delete_entity', session, entities=entities) - if session.deleted_in_transaction(source.eid): - # source is being deleted, think to give scleanup argument - self._delete_info_multi(session, entities, sourceuri, scleanup=source.eid) - else: - self._delete_info_multi(session, entities, sourceuri) - source.delete_entities(session, entities) - if source.should_call_hooks: - self.hm.call_hooks('after_delete_entity', session, entities=entities) + self.hm.call_hooks('before_delete_entity', cnx, entities=entities) + self._delete_info_multi(cnx, entities) + source.delete_entities(cnx, entities) + self.hm.call_hooks('after_delete_entity', cnx, entities=entities) # don't clear cache here, it is done in a hook on commit - def glob_add_relation(self, session, subject, rtype, object): + def glob_add_relation(self, cnx, subject, rtype, object): """add a relation to the repository""" - self.glob_add_relations(session, {rtype: [(subject, object)]}) + self.glob_add_relations(cnx, {rtype: [(subject, object)]}) - def glob_add_relations(self, session, relations): + def glob_add_relations(self, cnx, relations): """add several relations to the repository relations is a dictionary rtype: [(subj_eid, obj_eid), ...] 
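+
+        for instance (hypothetical eids)::
+
+          {'in_group': [(user_eid, managers_group_eid)]}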
""" - sources = {} + source = self.system_source + relations_by_rtype = {} subjects_by_types = {} objects_by_types = {} - activintegrity = session.is_hook_category_activated('activeintegrity') + activintegrity = cnx.is_hook_category_activated('activeintegrity') for rtype, eids_subj_obj in relations.iteritems(): if server.DEBUG & server.DBG_REPO: for subjeid, objeid in eids_subj_obj: print 'ADD relation', subjeid, rtype, objeid for subjeid, objeid in eids_subj_obj: - source = self.locate_relation_source(session, subjeid, rtype, objeid) - if source not in sources: - relations_by_rtype = {} - sources[source] = relations_by_rtype - else: - relations_by_rtype = sources[source] if rtype in relations_by_rtype: relations_by_rtype[rtype].append((subjeid, objeid)) else: @@ -1540,13 +1335,13 @@ # take care to relation of cardinality '?1', as all eids will # be inserted later, we've remove duplicated eids since they # won't be catched by `del_existing_rel_if_needed` - rdef = session.rtype_eids_rdef(rtype, subjeid, objeid) + rdef = cnx.rtype_eids_rdef(rtype, subjeid, objeid) card = rdef.cardinality if card[0] in '?1': - with session.security_enabled(read=False): - session.execute('DELETE X %s Y WHERE X eid %%(x)s, ' - 'NOT Y eid %%(y)s' % rtype, - {'x': subjeid, 'y': objeid}) + with cnx.security_enabled(read=False): + cnx.execute('DELETE X %s Y WHERE X eid %%(x)s, ' + 'NOT Y eid %%(y)s' % rtype, + {'x': subjeid, 'y': objeid}) subjects = subjects_by_types.setdefault(rdef, {}) if subjeid in subjects: del relations_by_rtype[rtype][subjects[subjeid]] @@ -1554,45 +1349,40 @@ continue subjects[subjeid] = len(relations_by_rtype[rtype]) - 1 if card[1] in '?1': - with session.security_enabled(read=False): - session.execute('DELETE X %s Y WHERE Y eid %%(y)s, ' - 'NOT X eid %%(x)s' % rtype, - {'x': subjeid, 'y': objeid}) + with cnx.security_enabled(read=False): + cnx.execute('DELETE X %s Y WHERE Y eid %%(y)s, ' + 'NOT X eid %%(x)s' % rtype, + {'x': subjeid, 'y': objeid}) objects = objects_by_types.setdefault(rdef, {}) if objeid in objects: del relations_by_rtype[rtype][objects[objeid]] objects[objeid] = len(relations_by_rtype[rtype]) continue objects[objeid] = len(relations_by_rtype[rtype]) - for source, relations_by_rtype in sources.iteritems(): - if source.should_call_hooks: - for rtype, source_relations in relations_by_rtype.iteritems(): - self.hm.call_hooks('before_add_relation', session, - rtype=rtype, eids_from_to=source_relations) - for rtype, source_relations in relations_by_rtype.iteritems(): - source.add_relations(session, rtype, source_relations) - rschema = self.schema.rschema(rtype) - for subjeid, objeid in source_relations: - session.update_rel_cache_add(subjeid, rtype, objeid, rschema.symmetric) - if source.should_call_hooks: - for rtype, source_relations in relations_by_rtype.iteritems(): - self.hm.call_hooks('after_add_relation', session, - rtype=rtype, eids_from_to=source_relations) + for rtype, source_relations in relations_by_rtype.iteritems(): + self.hm.call_hooks('before_add_relation', cnx, + rtype=rtype, eids_from_to=source_relations) + for rtype, source_relations in relations_by_rtype.iteritems(): + source.add_relations(cnx, rtype, source_relations) + rschema = self.schema.rschema(rtype) + for subjeid, objeid in source_relations: + cnx.update_rel_cache_add(subjeid, rtype, objeid, rschema.symmetric) + for rtype, source_relations in relations_by_rtype.iteritems(): + self.hm.call_hooks('after_add_relation', cnx, + rtype=rtype, eids_from_to=source_relations) - def glob_delete_relation(self, 
session, subject, rtype, object): + def glob_delete_relation(self, cnx, subject, rtype, object): """delete a relation from the repository""" if server.DEBUG & server.DBG_REPO: print 'DELETE relation', subject, rtype, object - source = self.locate_relation_source(session, subject, rtype, object) - if source.should_call_hooks: - self.hm.call_hooks('before_delete_relation', session, - eidfrom=subject, rtype=rtype, eidto=object) - source.delete_relation(session, subject, rtype, object) + source = self.system_source + self.hm.call_hooks('before_delete_relation', cnx, + eidfrom=subject, rtype=rtype, eidto=object) + source.delete_relation(cnx, subject, rtype, object) rschema = self.schema.rschema(rtype) - session.update_rel_cache_del(subject, rtype, object, rschema.symmetric) - if source.should_call_hooks: - self.hm.call_hooks('after_delete_relation', session, - eidfrom=subject, rtype=rtype, eidto=object) + cnx.update_rel_cache_del(subject, rtype, object, rschema.symmetric) + self.hm.call_hooks('after_delete_relation', cnx, + eidfrom=subject, rtype=rtype, eidto=object) # pyro handling ########################################################### @@ -1643,7 +1433,7 @@ # client was not yet connected to the repo return if not session.closed: - self.close(session.id) + self.close(session.sessionid) daemon.removeConnection = removeConnection return daemon @@ -1655,35 +1445,10 @@ self.info('repository re-registered as a pyro object %s', self.pyro_appid) - # multi-sources planner helpers ########################################### - - @cached - def rel_type_sources(self, rtype): - warn('[3.18] old multi-source system will go away in the next version', - DeprecationWarning) - return tuple([source for source in self.sources - if source.support_relation(rtype) - or rtype in source.dont_cross_relations]) - - @cached - def can_cross_relation(self, rtype): - warn('[3.18] old multi-source system will go away in the next version', - DeprecationWarning) - return tuple([source for source in self.sources - if source.support_relation(rtype) - and rtype in source.cross_relations]) - - @cached - def is_multi_sources_relation(self, rtype): - warn('[3.18] old multi-source system will go away in the next version', - DeprecationWarning) - return any(source for source in self.sources - if not source is self.system_source - and source.support_relation(rtype)) # these are overridden by set_log_methods below # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None + info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None def pyro_unregister(config): diff -r 84738d495ffd -r 793377697c81 server/schemaserial.py --- a/server/schemaserial.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/schemaserial.py Wed Sep 24 18:04:30 2014 +0200 @@ -20,7 +20,6 @@ __docformat__ = "restructuredtext en" import os -from itertools import chain import json from logilab.common.shellutils import ProgressBar @@ -28,13 +27,13 @@ from yams import (BadSchemaDefinition, schema as schemamod, buildobjs as ybo, schema2sql as y2sql) -from cubicweb import CW_SOFTWARE_ROOT, Binary, typed_eid +from cubicweb import Binary from cubicweb.schema import (KNOWN_RPROPERTIES, CONSTRAINTS, ETYPE_NAME_MAP, - VIRTUAL_RTYPES, PURE_VIRTUAL_RTYPES) + VIRTUAL_RTYPES) from cubicweb.server import sqlutils -def group_mapping(cursor, interactive=True): +def group_mapping(cnx, interactive=True): """create a group mapping from an rql cursor A group mapping has standard group 
names as key (managers, owners at least) @@ -43,7 +42,7 @@ from the user. """ res = {} - for eid, name in cursor.execute('Any G, N WHERE G is CWGroup, G name N', + for eid, name in cnx.execute('Any G, N WHERE G is CWGroup, G name N', build_descr=False): res[name] = eid if not interactive: @@ -75,33 +74,33 @@ break return res -def cstrtype_mapping(cursor): +def cstrtype_mapping(cnx): """cached constraint types mapping""" - map = dict(cursor.execute('Any T, X WHERE X is CWConstraintType, X name T')) + map = dict(cnx.execute('Any T, X WHERE X is CWConstraintType, X name T')) return map # schema / perms deserialization ############################################## -def deserialize_schema(schema, session): +def deserialize_schema(schema, cnx): """return a schema according to information stored in an rql database as CWRType and CWEType entities """ - repo = session.repo + repo = cnx.repo dbhelper = repo.system_source.dbhelper # XXX bw compat (3.6 migration) - sqlcu = session.cnxset['system'] - sqlcu.execute("SELECT * FROM cw_CWRType WHERE cw_name='symetric'") - if sqlcu.fetchall(): - sql = dbhelper.sql_rename_col('cw_CWRType', 'cw_symetric', 'cw_symmetric', - dbhelper.TYPE_MAPPING['Boolean'], True) - sqlcu.execute(sql) - sqlcu.execute("UPDATE cw_CWRType SET cw_name='symmetric' WHERE cw_name='symetric'") - session.commit(False) + with cnx.ensure_cnx_set: + sqlcu = cnx.system_sql("SELECT * FROM cw_CWRType WHERE cw_name='symetric'") + if sqlcu.fetchall(): + sql = dbhelper.sql_rename_col('cw_CWRType', 'cw_symetric', 'cw_symmetric', + dbhelper.TYPE_MAPPING['Boolean'], True) + sqlcu.execute(sql) + sqlcu.execute("UPDATE cw_CWRType SET cw_name='symmetric' WHERE cw_name='symetric'") + cnx.commit(False) ertidx = {} copiedeids = set() - permsidx = deserialize_ertype_permissions(session) + permsidx = deserialize_ertype_permissions(cnx) schema.reading_from_database = True - for eid, etype, desc in session.execute( + for eid, etype, desc in cnx.execute( 'Any X, N, D WHERE X is CWEType, X name N, X description D', build_descr=False): # base types are already in the schema, skip them @@ -115,7 +114,7 @@ needcopy = False netype = ETYPE_NAME_MAP[etype] # can't use write rql queries at this point, use raw sql - sqlexec = session.system_sql + sqlexec = cnx.system_sql if sqlexec('SELECT 1 FROM %(p)sCWEType WHERE %(p)sname=%%(n)s' % {'p': sqlutils.SQL_PREFIX}, {'n': netype}).fetchone(): # the new type already exists, we should copy (eg make existing @@ -132,17 +131,12 @@ sqlexec(alter_table_sql) sqlexec('UPDATE entities SET type=%(n)s WHERE type=%(x)s', {'x': etype, 'n': netype}) - session.commit(False) - try: - sqlexec('UPDATE deleted_entities SET type=%(n)s WHERE type=%(x)s', - {'x': etype, 'n': netype}) - except Exception: - pass + cnx.commit(False) tocleanup = [eid] tocleanup += (eid for eid, cached in repo._type_source_cache.iteritems() if etype == cached[0]) repo.clear_caches(tocleanup) - session.commit(False) + cnx.commit(False) if needcopy: ertidx[eid] = netype copiedeids.add(eid) @@ -154,14 +148,14 @@ eschema = schema.add_entity_type( ybo.EntityType(name=etype, description=desc, eid=eid)) set_perms(eschema, permsidx) - for etype, stype in session.execute( + for etype, stype in cnx.execute( 'Any XN, ETN WHERE X is CWEType, X name XN, X specializes ET, ET name ETN', build_descr=False): etype = ETYPE_NAME_MAP.get(etype, etype) stype = ETYPE_NAME_MAP.get(stype, stype) schema.eschema(etype)._specialized_type = stype schema.eschema(stype)._specialized_by.append(etype) - for eid, rtype, desc, sym, il, ftc in 
session.execute( + for eid, rtype, desc, sym, il, ftc in cnx.execute( 'Any X,N,D,S,I,FTC WHERE X is CWRType, X name N, X description D, ' 'X symmetric S, X inlined I, X fulltext_container FTC', build_descr=False): ertidx[eid] = rtype @@ -169,7 +163,7 @@ ybo.RelationType(name=rtype, description=desc, symmetric=bool(sym), inlined=bool(il), fulltext_container=ftc, eid=eid)) - cstrsidx = deserialize_rdef_constraints(session) + cstrsidx = deserialize_rdef_constraints(cnx) pendingrdefs = [] # closure to factorize common code of attribute/relation rdef addition def _add_rdef(rdefeid, seid, reid, oeid, **kwargs): @@ -198,13 +192,13 @@ set_perms(rdefs, permsidx) # Get the type parameters for additional base types. try: - extra_props = dict(session.execute('Any X, XTP WHERE X is CWAttribute, ' + extra_props = dict(cnx.execute('Any X, XTP WHERE X is CWAttribute, ' 'X extra_props XTP')) except Exception: - session.critical('Previous CRITICAL notification about extra_props is not ' + cnx.critical('Previous CRITICAL notification about extra_props is not ' 'a problem if you are migrating to cubicweb 3.17') extra_props = {} # not yet in the schema (introduced by 3.17 migration) - for values in session.execute( + for values in cnx.execute( 'Any X,SE,RT,OE,CARD,ORD,DESC,IDX,FTIDX,I18N,DFLT WHERE X is CWAttribute,' 'X relation_type RT, X cardinality CARD, X ordernum ORD, X indexed IDX,' 'X description DESC, X internationalizable I18N, X defaultval DFLT,' @@ -222,7 +216,7 @@ cardinality=card, description=desc, order=ord, indexed=idx, fulltextindexed=ftidx, internationalizable=i18n, default=default, **typeparams) - for values in session.execute( + for values in cnx.execute( 'Any X,SE,RT,OE,CARD,ORD,DESC,C WHERE X is CWRelation, X relation_type RT,' 'X cardinality CARD, X ordernum ORD, X description DESC, ' 'X from_entity SE, X to_entity OE, X composite C', build_descr=False): @@ -238,7 +232,7 @@ if rdefs is not None: set_perms(rdefs, permsidx) unique_togethers = {} - rset = session.execute( + rset = cnx.execute( 'Any X,E,R WHERE ' 'X is CWUniqueTogetherConstraint, ' 'X constraint_of E, X relations R', build_descr=False) @@ -257,11 +251,11 @@ for eschema, unique_together in unique_togethers.itervalues(): eschema._unique_together.append(tuple(sorted(unique_together))) schema.infer_specialization_rules() - session.commit() + cnx.commit() schema.reading_from_database = False -def deserialize_ertype_permissions(session): +def deserialize_ertype_permissions(cnx): """return sect action:groups associations for the given entity or relation schema with its eid, according to schema's permissions stored in the database as [read|add|delete|update]_permission @@ -270,21 +264,21 @@ res = {} for action in ('read', 'add', 'update', 'delete'): rql = 'Any E,N WHERE G is CWGroup, G name N, E %s_permission G' % action - for eid, gname in session.execute(rql, build_descr=False): + for eid, gname in cnx.execute(rql, build_descr=False): res.setdefault(eid, {}).setdefault(action, []).append(gname) rql = ('Any E,X,EXPR,V WHERE X is RQLExpression, X expression EXPR, ' 'E %s_permission X, X mainvars V' % action) - for eid, expreid, expr, mainvars in session.execute(rql, build_descr=False): + for eid, expreid, expr, mainvars in cnx.execute(rql, build_descr=False): # we don't know yet if it's a rql expr for an entity or a relation, # so append a tuple to differentiate from groups and so we'll be # able to instantiate it later res.setdefault(eid, {}).setdefault(action, []).append( (expr, mainvars, expreid) ) return res -def 
deserialize_rdef_constraints(session): +def deserialize_rdef_constraints(cnx): """return the list of relation definition's constraints as instances""" res = {} - for rdefeid, ceid, ct, val in session.execute( + for rdefeid, ceid, ct, val in cnx.execute( 'Any E, X,TN,V WHERE E constrained_by X, X is CWConstraint, ' 'X cstrtype T, T name TN, X value V', build_descr=False): cstr = CONSTRAINTS[ct].deserialize(val) @@ -311,7 +305,7 @@ # schema / perms serialization ################################################ -def serialize_schema(cursor, schema): +def serialize_schema(cnx, schema): """synchronize schema and permissions in the database according to current schema """ @@ -319,7 +313,7 @@ if not quiet: _title = '-> storing the schema in the database ' print _title, - execute = cursor.execute + execute = cnx.execute eschemas = schema.entities() if not quiet: pb_size = (len(eschemas + schema.relations()) @@ -328,7 +322,7 @@ pb = ProgressBar(pb_size, title=_title) else: pb = None - groupmap = group_mapping(cursor, interactive=False) + groupmap = group_mapping(cnx, interactive=False) # serialize all entity types, assuring CWEType is serialized first for proper # is / is_instance_of insertion eschemas.remove(schema.eschema('CWEType')) diff -r 84738d495ffd -r 793377697c81 server/server.py --- a/server/server.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/server.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -19,13 +19,9 @@ __docformat__ = "restructuredtext en" -import os -import sys import select -import warnings from time import localtime, mktime -from cubicweb.cwconfig import CubicWebConfiguration from cubicweb.server.utils import TasksManager from cubicweb.server.repository import Repository diff -r 84738d495ffd -r 793377697c81 server/serverconfig.py --- a/server/serverconfig.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/serverconfig.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
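The schemaserial hunks above systematically swap raw cursors and sessions
for Connection objects. A rough usage sketch (not part of the patch itself;
it assumes an open connection `cnx` and a loaded `schema` object):

.. sourcecode:: python

    from cubicweb.server.schemaserial import group_mapping, serialize_schema

    groups = group_mapping(cnx, interactive=False)  # e.g. {'managers': <eid>, ...}
    serialize_schema(cnx, schema)  # rql queries now go through cnx.execute
    cnx.commit()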
@@ -24,7 +24,7 @@
 from StringIO import StringIO

 import logilab.common.configuration as lgconfig
-from logilab.common.decorators import wproperty, cached
+from logilab.common.decorators import cached

 from cubicweb.toolsutils import read_config, restrict_perms_to_user
 from cubicweb.cwconfig import CONFIGURATIONS, CubicWebConfiguration
@@ -297,13 +297,16 @@
     # configuration file (#16102)
     @cached
     def read_sources_file(self):
+        """return a dictionary of values found in the sources file"""
         return read_config(self.sources_file(), raise_if_unreadable=True)

-    def sources(self):
-        """return a dictionnaries containing sources definitions indexed by
-        sources'uri
-        """
-        return self.read_sources_file()
+    @property
+    def system_source_config(self):
+        return self.read_sources_file()['system']
+
+    @property
+    def default_admin_config(self):
+        return self.read_sources_file()['admin']

     def source_enabled(self, source):
         if self.sources_mode is not None:
diff -r 84738d495ffd -r 793377697c81 server/serverctl.py
--- a/server/serverctl.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/server/serverctl.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -132,17 +132,19 @@
     return cnx

 def repo_cnx(config):
-    """return a in-memory repository and a db api connection it"""
-    from cubicweb.dbapi import in_memory_repo_cnx
+    """return an in-memory repository and a repoapi connection to it"""
+    from cubicweb import repoapi
     from cubicweb.server.utils import manager_userpasswd
     try:
-        login = config.sources()['admin']['login']
-        pwd = config.sources()['admin']['password']
+        login = config.default_admin_config['login']
+        pwd = config.default_admin_config['password']
     except KeyError:
         login, pwd = manager_userpasswd()
     while True:
         try:
-            return in_memory_repo_cnx(config, login, password=pwd)
+            repo = repoapi.get_repository(config=config)
+            cnx = repoapi.connect(repo, login, password=pwd)
+            return repo, cnx
         except AuthenticationError:
             print '-> Error: wrong user/password.'
             # reset cubes else we'll have an assertion error on next retry
@@ -221,7 +223,7 @@
     def cleanup(self):
         """remove instance's configuration and database"""
         from logilab.database import get_db_helper
-        source = self.config.sources()['system']
+        source = self.config.system_source_config
         dbname = source['db-name']
         helper = get_db_helper(source['db-driver'])
         if ASK.confirm('Delete database %s ?' % dbname):
@@ -334,7 +336,7 @@
         automatic = self.get('automatic')
         appid = args.pop()
         config = ServerConfiguration.config_for(appid)
-        source = config.sources()['system']
+        source = config.system_source_config
         dbname = source['db-name']
         driver = source['db-driver']
         helper = get_db_helper(driver)
@@ -441,7 +443,7 @@
         appid = args[0]
         config = ServerConfiguration.config_for(appid)
         try:
-            system = config.sources()['system']
+            system = config.system_source_config
             extra_args = system.get('db-extra-arguments')
             extra = extra_args and {'extra_args': extra_args} or {}
             get_connection(
@@ -457,7 +459,7 @@
             init_repository(config, drop=self.config.drop)
         if not self.config.automatic:
             while ASK.confirm('Enter another source ?', default_is_yes=False):
-                CWCTL.run(['add-source', '--config-level',
+                CWCTL.run(['source-add', '--config-level',
                            str(self.config.config_level), config.appid])

@@ -467,7 +469,7 @@
       the identifier of the instance to initialize.
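+
+    Example (hypothetical instance id)::
+
+      cubicweb-ctl source-add myinstance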
""" - name = 'add-source' + name = 'source-add' arguments = '' min_args = max_args = 1 options = ( @@ -482,43 +484,43 @@ config = ServerConfiguration.config_for(appid) config.quick_start = True repo, cnx = repo_cnx(config) - req = cnx.request() - used = set(n for n, in req.execute('Any SN WHERE S is CWSource, S name SN')) - cubes = repo.get_cubes() - while True: - type = raw_input('source type (%s): ' - % ', '.join(sorted(SOURCE_TYPES))) - if type not in SOURCE_TYPES: - print '-> unknown source type, use one of the available types.' - continue - sourcemodule = SOURCE_TYPES[type].module - if not sourcemodule.startswith('cubicweb.'): - # module names look like cubes.mycube.themodule - sourcecube = SOURCE_TYPES[type].module.split('.', 2)[1] - # if the source adapter is coming from an external component, - # ensure it's specified in used cubes - if not sourcecube in cubes: - print ('-> this source type require the %s cube which is ' - 'not used by the instance.') + with cnx: + used = set(n for n, in cnx.execute('Any SN WHERE S is CWSource, S name SN')) + cubes = repo.get_cubes() + while True: + type = raw_input('source type (%s): ' + % ', '.join(sorted(SOURCE_TYPES))) + if type not in SOURCE_TYPES: + print '-> unknown source type, use one of the available types.' continue - break - while True: - sourceuri = raw_input('source identifier (a unique name used to ' - 'tell sources apart): ').strip() - if not sourceuri: - print '-> mandatory.' - else: - sourceuri = unicode(sourceuri, sys.stdin.encoding) - if sourceuri in used: - print '-> uri already used, choose another one.' + sourcemodule = SOURCE_TYPES[type].module + if not sourcemodule.startswith('cubicweb.'): + # module names look like cubes.mycube.themodule + sourcecube = SOURCE_TYPES[type].module.split('.', 2)[1] + # if the source adapter is coming from an external component, + # ensure it's specified in used cubes + if not sourcecube in cubes: + print ('-> this source type require the %s cube which is ' + 'not used by the instance.') + continue + break + while True: + sourceuri = raw_input('source identifier (a unique name used to ' + 'tell sources apart): ').strip() + if not sourceuri: + print '-> mandatory.' else: - break - # XXX configurable inputlevel - sconfig = ask_source_config(config, type, inputlevel=self.config.config_level) - cfgstr = unicode(generate_source_config(sconfig), sys.stdin.encoding) - req.create_entity('CWSource', name=sourceuri, - type=unicode(type), config=cfgstr) - cnx.commit() + sourceuri = unicode(sourceuri, sys.stdin.encoding) + if sourceuri in used: + print '-> uri already used, choose another one.' 
+ else: + break + # XXX configurable inputlevel + sconfig = ask_source_config(config, type, inputlevel=self.config.config_level) + cfgstr = unicode(generate_source_config(sconfig), sys.stdin.encoding) + cnx.create_entity('CWSource', name=sourceuri, + type=unicode(type), config=cfgstr) + cnx.commit() class GrantUserOnInstanceCommand(Command): @@ -544,7 +546,7 @@ from cubicweb.server.sqlutils import sqlexec, sqlgrants appid, user = args config = ServerConfiguration.config_for(appid) - source = config.sources()['system'] + source = config.system_source_config set_owner = self.config.set_owner cnx = system_source_cnx(source, special_privs='GRANT') cursor = cnx.cursor() @@ -675,6 +677,7 @@ def run(self, args): from logilab.common.daemon import daemonize, setugid from cubicweb.cwctl import init_cmdline_log_threshold + print 'WARNING: Standalone repository with pyro or zmq access is deprecated' appid = args[0] debug = self['debug'] if sys.platform == 'win32' and not debug: @@ -734,12 +737,12 @@ mih.backup_database(output, askconfirm=False, format=format) mih.shutdown() -def _local_restore(appid, backupfile, drop, systemonly=True, format='native'): +def _local_restore(appid, backupfile, drop, format='native'): config = ServerConfiguration.config_for(appid) config.verbosity = 1 # else we won't be asked for confirmation on problems config.quick_start = True mih = config.migration_handler(connect=False, verbosity=1) - mih.restore_database(backupfile, drop, systemonly, askconfirm=False, format=format) + mih.restore_database(backupfile, drop, askconfirm=False, format=format) repo = mih.repo_connect() # version of the database dbversions = repo.get_versions() @@ -848,13 +851,6 @@ 'help': 'for some reason the database doesn\'t exist and so ' 'should not be dropped.'} ), - ('restore-all', - {'short': 'r', 'action' : 'store_true', 'default' : False, - 'help': 'restore everything, eg not only the system source database ' - 'but also data for all sources supporting backup/restore and custom ' - 'instance data. In that case, is expected to be the ' - 'timestamp of the backup to restore, not a file'} - ), ('format', {'short': 'f', 'default': 'native', 'type': 'choice', 'choices': ('native', 'portable'), @@ -874,7 +870,6 @@ raise _local_restore(appid, backupfile, drop=not self.config.no_drop, - systemonly=not self.config.restore_all, format=self.config.format) if self.config.format == 'portable': try: @@ -985,9 +980,12 @@ appid = args[0] config = ServerConfiguration.config_for(appid) config.repairing = self.config.force - repo, cnx = repo_cnx(config) - check(repo, cnx, - self.config.checks, self.config.reindex, self.config.autofix) + repo, _cnx = repo_cnx(config) + with repo.internal_cnx() as cnx: + check(repo, cnx, + self.config.checks, + self.config.reindex, + self.config.autofix) class RebuildFTICommand(Command): @@ -1009,29 +1007,9 @@ etypes = args or None config = ServerConfiguration.config_for(appid) repo, cnx = repo_cnx(config) - session = repo._get_session(cnx.sessionid, setcnxset=True) - reindex_entities(repo.schema, session, etypes=etypes) - cnx.commit() - - -class SynchronizeInstanceSchemaCommand(Command): - """Synchronize persistent schema with cube schema. - - Will synchronize common stuff between the cube schema and the - actual persistent schema, but will not add/remove any entity or relation. - - - the identifier of the instance to synchronize. 
- """ - name = 'schema-sync' - arguments = '' - min_args = max_args = 1 - - def run(self, args): - appid = args[0] - config = ServerConfiguration.config_for(appid) - mih = config.migration_handler() - mih.cmd_synchronize_schema() + with cnx: + reindex_entities(repo.schema, cnx._cnx, etypes=etypes) + cnx.commit() class SynchronizeSourceCommand(Command): @@ -1102,7 +1080,7 @@ diff_tool = args.pop(0) config = ServerConfiguration.config_for(appid) repo, cnx = repo_cnx(config) - session = repo._get_session(cnx.sessionid, setcnxset=True) + cnx.close() fsschema = config.load_schema(expand_cubes=True) schema_diff(fsschema, repo.schema, permissionshandler, diff_tool, ignore=('eid',)) @@ -1112,7 +1090,7 @@ StartRepositoryCommand, DBDumpCommand, DBRestoreCommand, DBCopyCommand, AddSourceCommand, CheckRepositoryCommand, RebuildFTICommand, - SynchronizeInstanceSchemaCommand, SynchronizeSourceCommand, SchemaDiffCommand, + SynchronizeSourceCommand, SchemaDiffCommand, ): CWCTL.register(cmdclass) diff -r 84738d495ffd -r 793377697c81 server/session.py --- a/server/session.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/session.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -23,12 +23,14 @@ from time import time from uuid import uuid4 from warnings import warn +import functools +from contextlib import contextmanager from logilab.common.deprecation import deprecated from logilab.common.textutils import unormalize from logilab.common.registry import objectify_predicate -from cubicweb import UnknownEid, QueryError, schema, server +from cubicweb import QueryError, schema, server, ProgrammingError from cubicweb.req import RequestSessionBase from cubicweb.utils import make_uid from cubicweb.rqlrewrite import RQLRewriter @@ -96,59 +98,75 @@ return obj.deny_all_hooks_but(*categories) -class _hooks_control(object): +class _hooks_control(object): # XXX repoapi: remove me when + # session stop being connection """context manager to control activated hooks categories. - If mode is session.`HOOKS_DENY_ALL`, given hooks categories will + If mode is `HOOKS_DENY_ALL`, given hooks categories will be enabled. - If mode is session.`HOOKS_ALLOW_ALL`, given hooks categories will + If mode is `HOOKS_ALLOW_ALL`, given hooks categories will be disabled. .. sourcecode:: python - with _hooks_control(self.session, self.session.HOOKS_ALLOW_ALL, 'integrity'): + with _hooks_control(cnx, HOOKS_ALLOW_ALL, 'integrity'): # ... do stuff with all but 'integrity' hooks activated - with _hooks_control(self.session, self.session.HOOKS_DENY_ALL, 'integrity'): + with _hooks_control(cnx, HOOKS_DENY_ALL, 'integrity'): # ... do stuff with none but 'integrity' hooks activated - This is an internal api, you should rather use - :meth:`~cubicweb.server.session.Session.deny_all_hooks_but` or - :meth:`~cubicweb.server.session.Session.allow_all_hooks_but` session - methods. + This is an internal API, you should rather use + :meth:`~cubicweb.server.session.Connection.deny_all_hooks_but` or + :meth:`~cubicweb.server.session.Connection.allow_all_hooks_but` + Connection methods. 
""" - def __init__(self, session, mode, *categories): + def __init__(self, cnx, mode, *categories): assert mode in (HOOKS_ALLOW_ALL, HOOKS_DENY_ALL) - self.session = session - self.tx = session._tx + self.cnx = cnx self.mode = mode self.categories = categories self.oldmode = None self.changes = () def __enter__(self): - self.oldmode = self.tx.hooks_mode - self.tx.hooks_mode = self.mode + self.oldmode = self.cnx.hooks_mode + self.cnx.hooks_mode = self.mode if self.mode is HOOKS_DENY_ALL: - self.changes = self.tx.enable_hook_categories(*self.categories) + self.changes = self.cnx.enable_hook_categories(*self.categories) else: - self.changes = self.tx.disable_hook_categories(*self.categories) - self.tx.ctx_count += 1 + self.changes = self.cnx.disable_hook_categories(*self.categories) + self.cnx.ctx_count += 1 def __exit__(self, exctype, exc, traceback): - self.tx.ctx_count -= 1 - if self.tx.ctx_count == 0: - self.session._clear_thread_storage(self.tx) - else: - try: - if self.categories: - if self.mode is HOOKS_DENY_ALL: - self.tx.disable_hook_categories(*self.categories) - else: - self.tx.enable_hook_categories(*self.categories) - finally: - self.tx.hooks_mode = self.oldmode + self.cnx.ctx_count -= 1 + try: + if self.categories: + if self.mode is HOOKS_DENY_ALL: + self.cnx.disable_hook_categories(*self.categories) + else: + self.cnx.enable_hook_categories(*self.categories) + finally: + self.cnx.hooks_mode = self.oldmode + +class _session_hooks_control(_hooks_control): # XXX repoapi: remove me when + # session stop being connection + """hook control context manager for session + + Necessary to handle some unholy transaction scope logic.""" + + + def __init__(self, session, mode, *categories): + self.session = session + super_init = super(_session_hooks_control, self).__init__ + super_init(session._cnx, mode, *categories) + + def __exit__(self, exctype, exc, traceback): + super_exit = super(_session_hooks_control, self).__exit__ + ret = super_exit(exctype, exc, traceback) + if self.cnx.ctx_count == 0: + self.session._close_cnx(self.cnx) + return ret @deprecated('[3.17] use .security_enabled instead') def security_enabled(obj, *args, **kwargs): @@ -160,9 +178,8 @@ By default security is disabled on queries executed on the repository side. 
""" - def __init__(self, session, read=None, write=None): - self.session = session - self.tx = session._tx + def __init__(self, cnx, read=None, write=None): + self.cnx = cnx self.read = read self.write = write self.oldread = None @@ -172,24 +189,39 @@ if self.read is None: self.oldread = None else: - self.oldread = self.tx.read_security - self.tx.read_security = self.read + self.oldread = self.cnx.read_security + self.cnx.read_security = self.read if self.write is None: self.oldwrite = None else: - self.oldwrite = self.tx.write_security - self.tx.write_security = self.write - self.tx.ctx_count += 1 + self.oldwrite = self.cnx.write_security + self.cnx.write_security = self.write + self.cnx.ctx_count += 1 def __exit__(self, exctype, exc, traceback): - self.tx.ctx_count -= 1 - if self.tx.ctx_count == 0: - self.session._clear_thread_storage(self.tx) - else: - if self.oldread is not None: - self.tx.read_security = self.oldread - if self.oldwrite is not None: - self.tx.write_security = self.oldwrite + self.cnx.ctx_count -= 1 + if self.oldread is not None: + self.cnx.read_security = self.oldread + if self.oldwrite is not None: + self.cnx.write_security = self.oldwrite + +class _session_security_enabled(_security_enabled): + """hook security context manager for session + + Necessary To handle some unholy transaction scope logic.""" + + + def __init__(self, session, read=None, write=None): + self.session = session + super_init = super(_session_security_enabled, self).__init__ + super_init(session._cnx, read=read, write=write) + + def __exit__(self, exctype, exc, traceback): + super_exit = super(_session_security_enabled, self).__exit__ + ret = super_exit(exctype, exc, traceback) + if self.cnx.ctx_count == 0: + self.session._close_cnx(self.cnx) + return ret HOOKS_ALLOW_ALL = object() HOOKS_DENY_ALL = object() @@ -199,13 +231,13 @@ pass class CnxSetTracker(object): - """Keep track of which transaction use which cnxset. + """Keep track of which connection use which cnxset. - There should be one of these object per session (including internal sessions). + There should be one of these objects per session (including internal sessions). - Session objects are responsible of creating their CnxSetTracker object. + Session objects are responsible for creating their CnxSetTracker object. - Transactions should use the :meth:`record` and :meth:`forget` to inform the + Connections should use the :meth:`record` and :meth:`forget` to inform the tracker of cnxsets they have acquired. .. automethod:: cubicweb.server.session.CnxSetTracker.record @@ -231,13 +263,13 @@ def __exit__(self, *args): return self._condition.__exit__(*args) - def record(self, txid, cnxset): - """Inform the tracker that a txid has acquired a cnxset + def record(self, cnxid, cnxset): + """Inform the tracker that a cnxid has acquired a cnxset - This method is to be used by Transaction objects. + This method is to be used by Connection objects. This method fails when: - - The txid already has a recorded cnxset. + - The cnxid already has a recorded cnxset. - The tracker is not active anymore. 
        Notes about the caller:
@@ -264,19 +296,19 @@
         with self._condition:
             if not self._active:
                 raise SessionClosedError('Closed')
-            old = self._record.get(txid)
+            old = self._record.get(cnxid)
             if old is not None:
-                raise ValueError('transaction "%s" already has a cnx_set (%r)'
-                                 % (txid, old))
-            self._record[txid] = cnxset
+                raise ValueError('connection "%s" already has a cnx_set (%r)'
+                                 % (cnxid, old))
+            self._record[cnxid] = cnxset

-    def forget(self, txid, cnxset):
-        """Inform the tracker that a txid have release a cnxset
+    def forget(self, cnxid, cnxset):
+        """Inform the tracker that a cnxid has released a cnxset

-        This methode is to be used by Transaction object.
+        This method is to be used by Connection objects.

         This method fails when:
-        - The cnxset for the txid does not match the recorded one.
+        - The cnxset for the cnxid does not match the recorded one.

         Notes about the caller:
         (1) It is responsible for releasing the cnxset.
@@ -296,11 +328,11 @@
             cnxset = repo._free_cnxset(cnxset) # (1)
         """
         with self._condition:
-            old = self._record.get(txid, None)
+            old = self._record.get(cnxid, None)
             if old is not cnxset:
                 raise ValueError('recorded cnxset for "%s" mismatch: %r != %r'
-                                 % (txid, old, cnxset))
-            self._record.pop(txid)
+                                 % (cnxid, old, cnxset))
+            self._record.pop(cnxid)
             self._condition.notify_all()

     def close(self):
@@ -318,7 +350,7 @@

         This method is to be used by Session objects.

-        Returns a tuple of transaction ids that remain open.
+        Returns a tuple of connection ids that remain open.
         """
         with self._condition:
             if self._active:
@@ -330,10 +362,30 @@
                 timeout -= time() - start
         return tuple(self._record)

-class Transaction(object):
-    """Repository Transaction
+
+def _with_cnx_set(func):
+    """decorator for Connection methods that ensures they run with a cnxset
+    """
+    @functools.wraps(func)
+    def wrapper(cnx, *args, **kwargs):
+        with cnx.ensure_cnx_set:
+            return func(cnx, *args, **kwargs)
+    return wrapper

-    Holds all transaction related data
+def _open_only(func):
+    """decorator for Connection methods that checks the connection is open"""
+    @functools.wraps(func)
+    def check_open(cnx, *args, **kwargs):
+        if not cnx._open:
+            raise ProgrammingError('Closed Connection: %s'
+                                   % cnx.connectionid)
+        return func(cnx, *args, **kwargs)
+    return check_open
+
+
+class Connection(RequestSessionBase):
+    """Repository Connection
+
+    Holds all connection related data

     Database connection resources:

@@ -342,11 +394,11 @@
     :attr:`cnxset`, the connections set to use to execute queries on sources.
     If the transaction is read only, the connection set may be freed between
-    actual queries. This allows multiple transactions with a reasonably low
+    actual queries. This allows multiple connections with a reasonably low
     connection set pool size.  Control mechanism is detailed below.

-    .. automethod:: cubicweb.server.session.Transaction.set_cnxset
-    .. automethod:: cubicweb.server.session.Transaction.free_cnxset
+    .. automethod:: cubicweb.server.session.Connection.set_cnxset
+    .. automethod:: cubicweb.server.session.Connection.free_cnxset

     :attr:`mode`, string telling the connections set handling mode, may be one
     of 'read' (connections set may be freed), 'write' (some write was done in
@@ -387,15 +439,40 @@

     """

-    def __init__(self, txid, session, rewriter):
-        #: transaction unique id
-        self.transactionid = txid
+    is_request = False
+
+    def __init__(self, session, cnxid=None, session_handled=False):
+        # using super(Connection, self) confuses some test hacks
+        RequestSessionBase.__init__(self, session.vreg)
+        # an explicit cnxid is only provided by the session
+        if cnxid is not None:
+            assert session_handled # only the session provides an explicit cnxid
+        #: connection unique id
+        self._open = None
+        if cnxid is None:
+            cnxid = '%s-%s' % (session.sessionid, uuid4().hex)
+        self.connectionid = cnxid
+        self.sessionid = session.sessionid
+        #: self._session_handled tells whether the life cycle of this
+        #: Connection is automatically controlled by the Session (the old
+        #: backward compatibility mode)
+        self._session_handled = session_handled
         #: reentrance handling
         self.ctx_count = 0
+        #: count the number of entries into contexts needing a cnxset
+        self._cnxset_count = 0
+        #: Boolean for compat with the older explicit set_cnxset/free_cnx API.
+        #: When set_cnxset has been called, no automatic freeing will be done
+        #: until free_cnx is called.
+        self._auto_free_cnx_set = True

         #: server.Repository object
         self.repo = session.repo
         self.vreg = self.repo.vreg
+        self._execute = self.repo.querier.execute
+
+        # other session utility
+        self._session_timestamp = session._timestamp

         #: connection handling mode
         self.mode = session.default_mode
@@ -403,11 +480,14 @@
         self._cnxset = None
         #: CnxSetTracker used to report cnxset usage
         self._cnxset_tracker = session._cnxset_tracker
-        #: is this transaction from a client or internal to the repo
+        #: is this connection from a client or internal to the repo
         self.running_dbapi_query = True
+        # internal (root) session
+        self.is_internal_session = session.is_internal_session

         #: dict containing arbitrary data cleared at the end of the transaction
-        self.data = {}
+        self.transaction_data = {}
+        self._session_data = session.data
         #: ordered list of operations to be processed on commit/rollback
         self.pending_operations = []
         #: (None, 'precommit', 'postcommit', 'uncommitable')
@@ -432,118 +512,346 @@
         self.undo_actions = config['undo-enabled']

         # RQLRewriter are not thread safe
-        self._rewriter = rewriter
+        self._rewriter = RQLRewriter(self)
+
+        # other session utility
+        if session.user.login == '__internal_manager__':
+            self.user = session.user
+            self.set_language(self.user.prefered_language())
+        else:
+            self._set_user(session.user)
+
+
+    # life cycle handling ####################################################
+
+    def __enter__(self):
+        assert self._open is None # first opening
+        self._open = True
+        return self
+
+    def __exit__(self, exctype=None, excvalue=None, tb=None):
+        assert self._open # actually already open
+        assert self._cnxset_count == 0
+        self.rollback()
+        self._open = False
+
+
+
+    # shared data handling ###################################################

     @property
-    def transaction_data(self):
-        return self.data
+    def data(self):
+        return self._session_data
+
+    @property
+    def rql_rewriter(self):
+        return self._rewriter
+
+    @_open_only
+    @deprecated('[3.19] use session or transaction data')
+    def get_shared_data(self, key, default=None, pop=False, txdata=False):
+        """return value associated to `key` in session data"""
+        if txdata:
+            data = self.transaction_data
+        else:
+            data = self._session_data
+        if pop:
+            return data.pop(key, default)
+        else:
+            return data.get(key, default)
+
+    @_open_only
+    @deprecated('[3.19] use session or transaction data')
+    def set_shared_data(self, key, value, txdata=False):
+        """set value associated to `key` in session data"""
+        if txdata:
+            self.transaction_data[key] = value
+        else:
+            self._session_data[key] = value

     def clear(self):
         """reset internal data"""
-        self.data = {}
+        self.transaction_data = {}
         #: ordered list of operations to be processed on commit/rollback
         self.pending_operations = []
         #: (None, 'precommit', 'postcommit', 'uncommitable')
         self.commit_state = None
         self.pruned_hooks_cache = {}
+        self.local_perm_cache.clear()
+        self._rewriter = RQLRewriter(self)

     # Connection Set Management ###############################################

     @property
+    @_open_only
     def cnxset(self):
         return self._cnxset

     @cnxset.setter
+    @_open_only
     def cnxset(self, new_cnxset):
         with self._cnxset_tracker:
             old_cnxset = self._cnxset
             if new_cnxset is old_cnxset:
                 return #nothing to do
             if old_cnxset is not None:
+                old_cnxset.rollback()
                 self._cnxset = None
                 self.ctx_count -= 1
-                self._cnxset_tracker.forget(self.transactionid, old_cnxset)
+                self._cnxset_tracker.forget(self.connectionid, old_cnxset)
             if new_cnxset is not None:
-                self._cnxset_tracker.record(self.transactionid, new_cnxset)
+                self._cnxset_tracker.record(self.connectionid, new_cnxset)
                 self._cnxset = new_cnxset
                 self.ctx_count += 1

-    def set_cnxset(self):
-        """the transaction need a connections set to execute some queries"""
+    @_open_only
+    def _set_cnxset(self):
+        """the connection needs a connections set to execute some queries"""
         if self.cnxset is None:
             cnxset = self.repo._get_cnxset()
             try:
                 self.cnxset = cnxset
-                try:
-                    cnxset.cnxset_set()
-                except:
-                    self.cnxset = None
-                    raise
             except:
                 self.repo._free_cnxset(cnxset)
                 raise
         return self.cnxset

-    def free_cnxset(self, ignoremode=False):
-        """the transaction is no longer using its connections set, at least for some time"""
+    @_open_only
+    def _free_cnxset(self, ignoremode=False):
+        """the connection is no longer using its connections set, at least for some time"""
         # cnxset may be none if no operation has been done since last commit
         # or rollback
         cnxset = self.cnxset
         if cnxset is not None and (ignoremode or self.mode == 'read'):
+            assert self._cnxset_count == 0
             try:
                 self.cnxset = None
             finally:
                 cnxset.cnxset_freed()
                 self.repo._free_cnxset(cnxset)

+    @deprecated('[3.19] cnxset are automatically managed now.'
+                ' stop using explicit set and free.')
+    def set_cnxset(self):
+        self._auto_free_cnx_set = False
+        return self._set_cnxset()
+
+    @deprecated('[3.19] cnxset are automatically managed now.'
+                ' stop using explicit set and free.')
+    def free_cnxset(self, ignoremode=False):
+        self._auto_free_cnx_set = True
+        return self._free_cnxset(ignoremode=ignoremode)
+
+
+    @property
+    @contextmanager
+    @_open_only
+    def ensure_cnx_set(self):
+        assert self._cnxset_count >= 0
+        if self._cnxset_count == 0:
+            self._set_cnxset()
+        try:
+            self._cnxset_count += 1
+            yield
+        finally:
+            self._cnxset_count = max(self._cnxset_count - 1, 0)
+            if self._cnxset_count == 0 and self._auto_free_cnx_set:
+                self._free_cnxset()
+
     # Entity cache management #################################################
     #
-    # The transaction entity cache as held in tx.data is removed at the
-    # end of the transaction (commit and rollback)
+    # The connection entity cache as held in cnx.transaction_data is removed
+    # at the end of the transaction (commit and rollback)
     #
-    # XXX transaction level caching may be a pb with multiple repository
+    # XXX connection level caching may be a pb with multiple repository
     # instances, but 1. this is probably not the only one :$ and 2. it may be
     # an acceptable risk. Anyway we could activate it or not according to a
     # configuration option

     def set_entity_cache(self, entity):
-        """Add `entity` to the transaction entity cache"""
-        ecache = self.data.setdefault('ecache', {})
+        """Add `entity` to the connection entity cache"""
+        # XXX not using _open_only because, at creation time, _set_user
+        # calls this function to cache the Connection user.
+        if entity.cw_etype != 'CWUser' and not self._open:
+            raise ProgrammingError('Closed Connection: %s'
+                                   % self.connectionid)
+        ecache = self.transaction_data.setdefault('ecache', {})
         ecache.setdefault(entity.eid, entity)

+    @_open_only
     def entity_cache(self, eid):
         """get cache entity for `eid`"""
-        return self.data['ecache'][eid]
+        return self.transaction_data['ecache'][eid]

+    @_open_only
     def cached_entities(self):
         """return the whole entity cache"""
-        return self.data.get('ecache', {}).values()
+        return self.transaction_data.get('ecache', {}).values()

+    @_open_only
     def drop_entity_cache(self, eid=None):
         """drop entity from the cache

         If eid is None, the whole cache is dropped"""
         if eid is None:
-            self.data.pop('ecache', None)
+            self.transaction_data.pop('ecache', None)
         else:
-            del self.data['ecache'][eid]
+            del self.transaction_data['ecache'][eid]
+
+    # relations handling #######################################################
+
+    @_open_only
+    def add_relation(self, fromeid, rtype, toeid):
+        """provide direct access to the repository method to add a relation.
+
+        This is equivalent to the following rql query:
+
+          SET X rtype Y WHERE X eid fromeid, Y eid toeid
+
+        without the read security check, and also without all the burden of
+        rql execution. You may use this in hooks when you know both eids of
+        the relation you want to add.
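+
+        A minimal sketch (hypothetical eids, e.g. obtained in a hook):
+
+        .. sourcecode:: python
+
+            # link an existing user to an existing group without going
+            # through rql
+            cnx.add_relation(user_eid, 'in_group', group_eid)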
+        """
+        self.add_relations([(rtype, [(fromeid, toeid)])])
+
+    @_open_only
+    def add_relations(self, relations):
+        '''set many relations using a shortcut similar to the one in add_relation
+
+        relations is a list of 2-uples, the first element of each
+        2-uple is the rtype, and the second is a list of (fromeid,
+        toeid) tuples
+        '''
+        edited_entities = {}
+        relations_dict = {}
+        with self.security_enabled(False, False):
+            for rtype, eids in relations:
+                if self.vreg.schema[rtype].inlined:
+                    for fromeid, toeid in eids:
+                        if fromeid not in edited_entities:
+                            entity = self.entity_from_eid(fromeid)
+                            edited = EditedEntity(entity)
+                            edited_entities[fromeid] = edited
+                        else:
+                            edited = edited_entities[fromeid]
+                        edited.edited_attribute(rtype, toeid)
+                else:
+                    relations_dict[rtype] = eids
+            self.repo.glob_add_relations(self, relations_dict)
+            for edited in edited_entities.itervalues():
+                self.repo.glob_update_entity(self, edited)
+
+
+    @_open_only
+    def delete_relation(self, fromeid, rtype, toeid):
+        """provide direct access to the repository method to delete a relation.
+
+        This is equivalent to the following rql query:
+
+          DELETE X rtype Y WHERE X eid fromeid, Y eid toeid
+
+        without the read security check, and also without all the burden of
+        rql execution. You may use this in hooks when you know both eids of
+        the relation you want to delete.
+        """
+        with self.security_enabled(False, False):
+            if self.vreg.schema[rtype].inlined:
+                entity = self.entity_from_eid(fromeid)
+                entity.cw_attr_cache[rtype] = None
+                self.repo.glob_update_entity(self, entity, set((rtype,)))
+            else:
+                self.repo.glob_delete_relation(self, fromeid, rtype, toeid)
+
+    # relations cache handling #################################################
+
+    @_open_only
+    def update_rel_cache_add(self, subject, rtype, object, symmetric=False):
+        self._update_entity_rel_cache_add(subject, rtype, 'subject', object)
+        if symmetric:
+            self._update_entity_rel_cache_add(object, rtype, 'subject', subject)
+        else:
+            self._update_entity_rel_cache_add(object, rtype, 'object', subject)
+
+    @_open_only
+    def update_rel_cache_del(self, subject, rtype, object, symmetric=False):
+        self._update_entity_rel_cache_del(subject, rtype, 'subject', object)
+        if symmetric:
+            self._update_entity_rel_cache_del(object, rtype, 'object', object)
+        else:
+            self._update_entity_rel_cache_del(object, rtype, 'object', subject)
+
+    @_open_only
+    def _update_entity_rel_cache_add(self, eid, rtype, role, targeteid):
+        try:
+            entity = self.entity_cache(eid)
+        except KeyError:
+            return
+        rcache = entity.cw_relation_cached(rtype, role)
+        if rcache is not None:
+            rset, entities = rcache
+            rset = rset.copy()
+            entities = list(entities)
+            rset.rows.append([targeteid])
+            if not isinstance(rset.description, list): # else description not set
+                rset.description = list(rset.description)
+            rset.description.append([self.entity_metas(targeteid)['type']])
+            targetentity = self.entity_from_eid(targeteid)
+            if targetentity.cw_rset is None:
+                targetentity.cw_rset = rset
+                targetentity.cw_row = rset.rowcount
+                targetentity.cw_col = 0
+            rset.rowcount += 1
+            entities.append(targetentity)
+            entity._cw_related_cache['%s_%s' % (rtype, role)] = (
+                rset, tuple(entities))
+
+    @_open_only
+    def _update_entity_rel_cache_del(self, eid, rtype, role, targeteid):
+        try:
+            entity = self.entity_cache(eid)
+        except KeyError:
+            return
+        rcache = entity.cw_relation_cached(rtype, role)
+        if rcache is not None:
+            rset, entities = rcache
+            for idx, row in enumerate(rset.rows):
+                if row[0] == targeteid:
+                    break
+            else:
+                # this may occur if the cache has been filled by a hook
+                # after the database update
+                self.debug('cache inconsistency for %s %s %s %s', eid, rtype,
+                           role, targeteid)
+                return
+            rset = rset.copy()
+            entities = list(entities)
+            del rset.rows[idx]
+            if isinstance(rset.description, list): # else description not set
+                del rset.description[idx]
+            del entities[idx]
+            rset.rowcount -= 1
+            entity._cw_related_cache['%s_%s' % (rtype, role)] = (
+                rset, tuple(entities))

     # Tracking of entities added or removed in the transaction ##################

+    @_open_only
     def deleted_in_transaction(self, eid):
         """return True if the entity of the given eid is being deleted in the
         current transaction
         """
-        return eid in self.data.get('pendingeids', ())
+        return eid in self.transaction_data.get('pendingeids', ())

+    @_open_only
     def added_in_transaction(self, eid):
         """return True if the entity of the given eid is being created in the
         current transaction
         """
-        return eid in self.data.get('neweids', ())
+        return eid in self.transaction_data.get('neweids', ())

     # Operation management ####################################################

+    @_open_only
     def add_operation(self, operation, index=None):
         """add an operation to be executed at the end of the transaction"""
         if index is None:
@@ -553,6 +861,15 @@

     # Hooks control ###########################################################

+    @_open_only
+    def allow_all_hooks_but(self, *categories):
+        return _hooks_control(self, HOOKS_ALLOW_ALL, *categories)
+
+    @_open_only
+    def deny_all_hooks_but(self, *categories):
+        return _hooks_control(self, HOOKS_DENY_ALL, *categories)
+
+    @_open_only
     def disable_hook_categories(self, *categories):
         """disable the given hook categories:
@@ -572,6 +889,7 @@
             disabledcats |= changes # changes is small hence faster
         return tuple(changes)

+    @_open_only
     def enable_hook_categories(self, *categories):
         """enable the given hook categories:
@@ -591,6 +909,7 @@
             disabledcats -= changes # changes is small hence faster
         return tuple(changes)

+    @_open_only
     def is_hook_category_activated(self, category):
         """return a boolean telling if the given category is currently activated
         or not
@@ -599,6 +918,7 @@
             return category in self.enabled_hook_cats
         return category not in self.disabled_hook_cats

+    @_open_only
     def is_hook_activated(self, hook):
         """return a boolean telling if the given hook class is currently
         activated or not
@@ -606,11 +926,18 @@
         return self.is_hook_category_activated(hook.category)

     # Security management #####################################################
+
+    @_open_only
+    def security_enabled(self, read=None, write=None):
+        return _security_enabled(self, read=read, write=write)
+
     @property
+    @_open_only
     def read_security(self):
         return self._read_security

     @read_security.setter
+    @_open_only
     def read_security(self, activated):
         oldmode = self._read_security
         self._read_security = activated
@@ -636,53 +963,219 @@

     # undo support ############################################################

+    @_open_only
     def ertype_supports_undo(self, ertype):
         return self.undo_actions and ertype not in NO_UNDO_TYPES

+    @_open_only
     def transaction_uuid(self, set=True):
-        uuid = self.data.get('tx_uuid')
+        uuid = self.transaction_data.get('tx_uuid')
         if set and uuid is None:
-            raise KeyError
+            self.transaction_data['tx_uuid'] = uuid = uuid4().hex
+            self.repo.system_source.start_undoable_transaction(self, uuid)
         return uuid

+    @_open_only
     def transaction_inc_action_counter(self):
-        num = self.data.setdefault('tx_action_count', 0) + 1
-        self.data['tx_action_count'] = num
+        num = self.transaction_data.setdefault('tx_action_count', 0) + 1
+        self.transaction_data['tx_action_count'] = num
         return num

     # db-api like interface ###################################################

+    @_open_only
     def source_defs(self):
         return self.repo.source_defs()

+    @deprecated('[3.19] use .entity_metas(eid) instead')
+    @_with_cnx_set
+    @_open_only
     def describe(self, eid, asdict=False):
         """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
-        metas = self.repo.type_and_source_from_eid(eid, self)
+        etype, extid, source = self.repo.type_and_source_from_eid(eid, self)
+        metas = {'type': etype, 'source': source, 'extid': extid}
         if asdict:
-            return dict(zip(('type', 'source', 'extid', 'asource'), metas))
-        # XXX :-1 for cw compat, use asdict=True for full information
-        return metas[:-1]
+            metas['asource'] = metas['source'] # XXX pre 3.19 client compat
+            return metas
+        return etype, source, extid
+
+    @_with_cnx_set
+    @_open_only
+    def entity_metas(self, eid):
+        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
+        etype, extid, source = self.repo.type_and_source_from_eid(eid, self)
+        return {'type': etype, 'source': source, 'extid': extid}
+
+    # core method #############################################################
+
+    @_with_cnx_set
+    @_open_only
+    def execute(self, rql, kwargs=None, eid_key=None, build_descr=True):
+        """db-api like method directly linked to the querier execute method.
+
+        See :meth:`cubicweb.dbapi.Cursor.execute` documentation.
+        """
+        self._session_timestamp.touch()
+        if eid_key is not None:
+            warn('[3.8] eid_key is deprecated, you can safely remove this argument',
+                 DeprecationWarning, stacklevel=2)
+        rset = self._execute(self, rql, kwargs, build_descr)
+        rset.req = self
+        self._session_timestamp.touch()
+        return rset
+
+    @_open_only
+    def rollback(self, free_cnxset=True, reset_pool=None):
+        """rollback the current transaction"""
+        if reset_pool is not None:
+            warn('[3.13] use free_cnxset argument instead for reset_pool',
+                 DeprecationWarning, stacklevel=2)
+            free_cnxset = reset_pool
+        if self._cnxset_count != 0:
+            # we are inside ensure_cnx_set, don't lose it
+            free_cnxset = False
+        cnxset = self.cnxset
+        if cnxset is None:
+            self.clear()
+            self._session_timestamp.touch()
+            self.debug('rollback transaction %s done (no db activity)', self.connectionid)
+            return
+        try:
+            # by default, operations are executed with security turned off
+            with self.security_enabled(False, False):
+                while self.pending_operations:
+                    try:
+                        operation = self.pending_operations.pop(0)
+                        operation.handle_event('rollback_event')
+                    except BaseException:
+                        self.critical('rollback error', exc_info=sys.exc_info())
+                        continue
+                cnxset.rollback()
+                self.debug('rollback for transaction %s done', self.connectionid)
+        finally:
+            self._session_timestamp.touch()
+            if free_cnxset:
+                self._free_cnxset(ignoremode=True)
+            self.clear()

-    def source_from_eid(self, eid):
-        """return the source where the entity with id <eid> is located"""
-        return self.repo.source_from_eid(eid, self)
+    @_open_only
+    def commit(self, free_cnxset=True, reset_pool=None):
+        """commit the current session's transaction"""
+        if reset_pool is not None:
+            warn('[3.13] use free_cnxset argument instead for reset_pool',
+                 DeprecationWarning, stacklevel=2)
+            free_cnxset = reset_pool
+        if self.cnxset is None:
+            assert not self.pending_operations
+            self.clear()
+            self._session_timestamp.touch()
+            self.debug('commit transaction %s done (no db activity)', self.connectionid)
+            return
+        if self._cnxset_count != 0:
+            # we are inside ensure_cnx_set, don't lose it
+    @_open_only
+    def commit(self, free_cnxset=True, reset_pool=None):
+        """commit the current session's transaction"""
+        if reset_pool is not None:
+            warn('[3.13] use free_cnxset argument instead for reset_pool',
+                 DeprecationWarning, stacklevel=2)
+            free_cnxset = reset_pool
+        if self.cnxset is None:
+            assert not self.pending_operations
+            self.clear()
+            self._session_timestamp.touch()
+            self.debug('commit transaction %s done (no db activity)', self.connectionid)
+            return
+        if self._cnxset_count != 0:
+            # we are inside ensure_cnx_set, don't lose it
+            free_cnxset = False
+        cstate = self.commit_state
+        if cstate == 'uncommitable':
+            raise QueryError('transaction must be rolled back')
+        if cstate is not None:
+            return
+        # on rollback, an operation should have the following state
+        # information:
+        # - processed by the precommit/commit event or not
+        # - if processed, is it the failed operation
+        debug = server.DEBUG & server.DBG_OPS
+        try:
+            # by default, operations are executed with security turned off
+            with self.security_enabled(False, False):
+                processed = []
+                self.commit_state = 'precommit'
+                if debug:
+                    print self.commit_state, '*' * 20
+                try:
+                    while self.pending_operations:
+                        operation = self.pending_operations.pop(0)
+                        operation.processed = 'precommit'
+                        processed.append(operation)
+                        if debug:
+                            print operation
+                        operation.handle_event('precommit_event')
+                    self.pending_operations[:] = processed
+                    self.debug('precommit transaction %s done', self.connectionid)
+                except BaseException:
+                    # if error on [pre]commit:
+                    #
+                    # * set .failed = True on the operation causing the failure
+                    # * call revert_event on processed operations
+                    # * call rollback_event on *all* operations
+                    #
+                    # that seems more natural than not calling rollback_event
+                    # for processed operations, and allows generic rollback
+                    # instead of having to implement rollback, revertprecommit
+                    # and revertcommit, which will be enough in most cases.
+                    operation.failed = True
+                    if debug:
+                        print self.commit_state, '*' * 20
+                    for operation in reversed(processed):
+                        if debug:
+                            print operation
+                        try:
+                            operation.handle_event('revertprecommit_event')
+                        except BaseException:
+                            self.critical('error while reverting precommit',
+                                          exc_info=True)
+                    # XXX use slice notation since self.pending_operations is a
+                    # read-only property.
+                    self.pending_operations[:] = processed + self.pending_operations
+                    self.rollback(free_cnxset)
+                    raise
+                self.cnxset.commit()
+                self.commit_state = 'postcommit'
+                if debug:
+                    print self.commit_state, '*' * 20
+                while self.pending_operations:
+                    operation = self.pending_operations.pop(0)
+                    if debug:
+                        print operation
+                    operation.processed = 'postcommit'
+                    try:
+                        operation.handle_event('postcommit_event')
+                    except BaseException:
+                        self.critical('error while postcommit',
+                                      exc_info=sys.exc_info())
+                self.debug('postcommit transaction %s done', self.connectionid)
+                return self.transaction_uuid(set=False)
+        finally:
+            self._session_timestamp.touch()
+            if free_cnxset:
+                self._free_cnxset(ignoremode=True)
+            self.clear()

     # resource accessors ######################################################

+    @_with_cnx_set
+    @_open_only
+    def call_service(self, regid, **kwargs):
+        self.debug('calling service %s', regid)
+        service = self.vreg['services'].select(regid, self, **kwargs)
+        return service.call(**kwargs)
+
+    @_with_cnx_set
+    @_open_only
     def system_sql(self, sql, args=None, rollback_on_failure=True):
         """return a sql cursor on the system database"""
         if sql.split(None, 1)[0].upper() != 'SELECT':
             self.mode = 'write'
-        source = self.cnxset.source('system')
+        source = self.repo.system_source
         try:
             return source.doexec(self, sql, args, rollback=rollback_on_failure)
         except (source.OperationalError, source.InterfaceError):
             if not rollback_on_failure:
                 raise
             source.warning("trying to reconnect")
-            self.cnxset.reconnect(source)
+            self.cnxset.reconnect()
             return source.doexec(self, sql, args, rollback=rollback_on_failure)

+    @_open_only
     def rtype_eids_rdef(self, rtype, eidfrom, eidto):
         # use type_and_source_from_eid instead of type_from_eid for optimization
         # (avoid two extra methods call)
@@ -691,31 +1184,49 @@
         return self.vreg.schema.rschema(rtype).rdefs[(subjtype, objtype)]


-def tx_attr(attr_name, writable=False):
-    """return a property to forward attribute access to transaction.
+def cnx_attr(attr_name, writable=False):
+    """return a property to forward attribute access to connection.

     This is to be used by session"""
     args = {}
-    def attr_from_tx(session):
-        return getattr(session._tx, attr_name)
-    args['fget'] = attr_from_tx
+    @deprecated('[3.19] use a Connection object instead')
+    def attr_from_cnx(session):
+        return getattr(session._cnx, attr_name)
+    args['fget'] = attr_from_cnx
     if writable:
+        @deprecated('[3.19] use a Connection object instead')
         def write_attr(session, value):
-            return setattr(session._tx, attr_name, value)
+            return setattr(session._cnx, attr_name, value)
         args['fset'] = write_attr
     return property(**args)
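The `cnx_attr` helper above is the session-to-connection forwarding idiom used throughout the `Session` class below. A standalone sketch with hypothetical stand-in classes, not part of this patch::

    class _FakeConnection(object):      # stands in for Connection
        mode = 'read'

    class _FakeSession(object):         # stands in for Session
        def __init__(self):
            self._cnx = _FakeConnection()
        mode = cnx_attr('mode', writable=True)

    s = _FakeSession()
    s.mode = 'write'                # forwarded: actually sets s._cnx.mode
    assert s._cnx.mode == 'write'   # the forwarding emits a DeprecationWarning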
-def tx_meth(meth_name):
-    """return a function forwarding calls to transaction.
+def cnx_meth(meth_name):
+    """return a function forwarding calls to connection.

     This is to be used by session"""
-    def meth_from_tx(session, *args, **kwargs):
-        return getattr(session._tx, meth_name)(*args, **kwargs)
-    meth_from_tx.__doc__ = getattr(Transaction, meth_name).__doc__
-    return meth_from_tx
+    @deprecated('[3.19] use a Connection object instead')
+    def meth_from_cnx(session, *args, **kwargs):
+        result = getattr(session._cnx, meth_name)(*args, **kwargs)
+        if getattr(result, '_cw', None) is not None:
+            result._cw = session
+        return result
+    meth_from_cnx.__doc__ = getattr(Connection, meth_name).__doc__
+    return meth_from_cnx
+
+
+class Timestamp(object):
+
+    def __init__(self):
+        self.value = time()
+
+    def touch(self):
+        self.value = time()
+
+    def __float__(self):
+        return float(self.value)
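The commit() sequence shown earlier drives each object queued on `pending_operations` through named events via `handle_event()`. A toy sketch of an object honouring that contract (hypothetical; real operations would subclass the server's Operation class), assuming `cnx` is an open Connection::

    class ReindexOp(object):
        # commit() calls handle_event('precommit_event') and then
        # handle_event('postcommit_event'); on failure it calls
        # 'revertprecommit_event' and finally 'rollback_event'
        def handle_event(self, event):
            getattr(self, event, lambda: None)()
        def precommit_event(self):
            pass  # e.g. flush pending full-text index updates
        def revertprecommit_event(self):
            pass  # undo whatever precommit_event did

    cnx.add_operation(ReindexOp())
    cnx.commit()   # processes the queued operation as part of the transaction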
-class Session(RequestSessionBase):
+class Session(RequestSessionBase): # XXX repoapi: stop being a
+                                   # RequestSessionBase at some point
     """Repository user session

     This ties all together:
@@ -733,23 +1244,23 @@

     :attr:`data` is a dictionary containing shared data, used to communicate
     extra information between the client and the repository

-    :attr:`_txs` is a dictionary of :class:`TransactionData` instance, one
-    for each running transaction. The key is the transaction id. By default
-    the transaction id is the thread name but it can be otherwise (per dbapi
+    :attr:`_cnxs` is a dictionary of :class:`Connection` instances, one
+    for each running connection. The key is the connection id. By default
+    the connection id is the thread name but it can be otherwise (per dbapi
     cursor for instance, or per thread name *from another process*).

-    :attr:`__threaddata` is a thread local storage whose `tx` attribute
-    refers to the proper instance of :class:`Transaction` according to the
-    transaction.
+    :attr:`__threaddata` is a thread local storage whose `cnx` attribute
+    refers to the proper instance of :class:`Connection` according to the
+    connection.

-    You should not have to use neither :attr:`_tx` nor :attr:`__threaddata`,
-    simply access transaction data transparently through the :attr:`_tx`
+    You should not have to use either :attr:`_cnx` or :attr:`__threaddata`;
+    simply access connection data transparently through the :attr:`_cnx`
     property. Also, you usually don't have to access it directly since current
-    transaction's data may be accessed/modified through properties / methods:
+    connection's data may be accessed/modified through properties / methods:

-    :attr:`transaction_data`, similarly to :attr:`data`, is a dictionary
+    :attr:`connection_data`, similarly to :attr:`data`, is a dictionary
     containing some shared data that should be cleared at the end of the
-    transaction. Hooks and operations may put arbitrary data in there, and
+    connection. Hooks and operations may put arbitrary data in there, and
     this may also be used as a communication channel between the client and
     the repository.

@@ -758,7 +1269,7 @@
     .. automethod:: cubicweb.server.session.Session.added_in_transaction
     .. automethod:: cubicweb.server.session.Session.deleted_in_transaction

-    Transaction state information:
+    Connection state information:

       :attr:`running_dbapi_query`, boolean flag telling if the executing query
       is coming from a dbapi connection or is a query from within the repository

@@ -831,10 +1342,10 @@

     def __init__(self, user, repo, cnxprops=None, _id=None):
         super(Session, self).__init__(repo.vreg)
-        self.id = _id or make_uid(unormalize(user.login).encode('UTF8'))
-        self.user = user
+        self.sessionid = _id or make_uid(unormalize(user.login).encode('UTF8'))
+        self.user = user # XXX repoapi: deprecated and store only a login.
         self.repo = repo
-        self.timestamp = time()
+        self._timestamp = Timestamp()
         self.default_mode = 'read'
         # short cut to querier .execute method
         self._execute = repo.querier.execute
@@ -844,53 +1355,96 @@
         # i18n initialization
         self.set_language(user.prefered_language())
         ### internals
-        # Transaction of this section
-        self._txs = {}
+        # Connection of this session
+        self._cnxs = {} # XXX repoapi: remove this when nobody uses the session
+                        # as a Connection
         # Data local to the thread
-        self.__threaddata = threading.local()
+        self.__threaddata = threading.local() # XXX repoapi: remove this when
+                                              # nobody uses the session as a
+                                              # Connection
         self._cnxset_tracker = CnxSetTracker()
         self._closed = False
         self._lock = threading.RLock()

     def __unicode__(self):
         return '<session %s (%s 0x%x)>' % (
-            unicode(self.user.login), self.id, id(self))
+            unicode(self.user.login), self.sessionid, id(self))
+
+    @property
+    def timestamp(self):
+        return float(self._timestamp)
+
+    @property
+    @deprecated('[3.19] session.id is deprecated, use session.sessionid')
+    def id(self):
+        return self.sessionid

-    def get_tx(self, txid):
-        """return the transaction attached to this session
+    @property
+    def login(self):
+        return self.user.login
+
+    def new_cnx(self):
+        """Return a new Connection object linked to the session

-        Transaction is created if necessary"""
-        with self._lock: # no transaction exist with the same id
+        The returned Connection will *not* be managed by the Session.
+        """
+        return Connection(self)
+
+    def _get_cnx(self, cnxid):
+        """return the connection attached to this session
+
+        Connection is created if necessary"""
+        with self._lock: # no connection exists with the same id
             try:
                 if self.closed:
-                    raise SessionClosedError('try to access connections set on a closed session %s' % self.id)
-                tx = self._txs[txid]
+                    raise SessionClosedError('try to access connections set on'
+                                             ' a closed session %s' % self.id)
+                cnx = self._cnxs[cnxid]
+                assert cnx._session_handled
             except KeyError:
-                rewriter = RQLRewriter(self)
-                tx = Transaction(txid, self, rewriter)
-                self._txs[txid] = tx
-            return tx
+                cnx = Connection(self, cnxid=cnxid, session_handled=True)
+                self._cnxs[cnxid] = cnx
+                cnx.__enter__()
+            return cnx

-    def set_tx(self, txid=None):
-        """set the default transaction of the current thread to <txid>
+    def _close_cnx(self, cnx):
+        """Close a Connection related to a session"""
+        assert cnx._session_handled
+        cnx.__exit__()
+        self._cnxs.pop(cnx.connectionid, None)
+        try:
+            if self.__threaddata.cnx is cnx:
+                del self.__threaddata.cnx
+        except AttributeError:
+            pass

-        Transaction is created if necessary"""
-        if txid is None:
-            txid = threading.currentThread().getName()
-        self.__threaddata.tx = self.get_tx(txid)
+    def set_cnx(self, cnxid=None):
+        # XXX repoapi: remove this when nobody uses the session as a Connection
+        """set the default connection of the current thread to <cnxid>
+
+        Connection is created if necessary"""
+        if cnxid is None:
+            cnxid = threading.currentThread().getName()
+        cnx = self._get_cnx(cnxid)
+        # New style session should not be accessed through the session.
+        assert cnx._session_handled
+        self.__threaddata.cnx = cnx

     @property
-    def _tx(self):
-        """default transaction for current session in current thread"""
+    def _cnx(self):
+        """default connection for current session in current thread"""
         try:
-            return self.__threaddata.tx
+            return self.__threaddata.cnx
         except AttributeError:
-            self.set_tx()
-            return self.__threaddata.tx
+            self.set_cnx()
+            return self.__threaddata.cnx

+    @deprecated('[3.19] use a Connection object instead')
     def get_option_value(self, option, foreid=None):
-        return self.repo.get_option_value(option, foreid)
+        if foreid is not None:
+            warn('[3.19] foreid argument is deprecated', DeprecationWarning,
+                 stacklevel=2)
+        return self.repo.get_option_value(option)

+    @deprecated('[3.19] use a Connection object instead')
     def transaction(self, free_cnxset=True):
         """return context manager to enter a transaction for the session: when
         exiting the `with` block on exception, call `session.rollback()`, else
@@ -901,184 +1455,55 @@
         """
         return transaction(self, free_cnxset)

-    @deprecated('[3.17] do not use hijack_user. create new Session object')
-    def hijack_user(self, user):
-        """return a fake request/session using specified user"""
-        session = Session(user, self.repo)
-        tx = session._tx
-        tx.cnxset = self.cnxset
-        # share pending_operations, else operation added in the hi-jacked
-        # session such as SendMailOp won't ever be processed
-        tx.pending_operations = self.pending_operations
-        # everything in tx.data should be copied back but the entity
-        # type cache we don't want to avoid security pb
-        tx.data = self._tx.data.copy()
-        tx.data.pop('ecache', None)
-        return session
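Side by side, the 3.19 connection style based on `new_cnx()` above and the deprecated session-driven style, as a minimal sketch (the RQL is a placeholder)::

    # 3.19 style: an explicitly managed connection
    with session.new_cnx() as cnx:
        cnx.execute('Any X WHERE X is CWUser')
        cnx.commit()

    # deprecated pre-3.19 style: the session doubles as an implicit
    # per-thread connection
    session.execute('Any X WHERE X is CWUser')
    session.commit()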
-    def add_relation(self, fromeid, rtype, toeid):
-        """provide direct access to the repository method to add a relation.
-
-        This is equivalent to the following rql query:
-
-          SET X rtype Y WHERE X eid fromeid, T eid toeid
-
-        without read security check but also all the burden of rql execution.
-        You may use this in hooks when you know both eids of the relation you
-        want to add.
-        """
-        self.add_relations([(rtype, [(fromeid, toeid)])])
-
-    def add_relations(self, relations):
-        '''set many relation using a shortcut similar to the one in add_relation
-
-        relations is a list of 2-uples, the first element of each
-        2-uple is the rtype, and the second is a list of (fromeid,
-        toeid) tuples
-        '''
-        edited_entities = {}
-        relations_dict = {}
-        with self.security_enabled(False, False):
-            for rtype, eids in relations:
-                if self.vreg.schema[rtype].inlined:
-                    for fromeid, toeid in eids:
-                        if fromeid not in edited_entities:
-                            entity = self.entity_from_eid(fromeid)
-                            edited = EditedEntity(entity)
-                            edited_entities[fromeid] = edited
-                        else:
-                            edited = edited_entities[fromeid]
-                        edited.edited_attribute(rtype, toeid)
-                else:
-                    relations_dict[rtype] = eids
-            self.repo.glob_add_relations(self, relations_dict)
-            for edited in edited_entities.itervalues():
-                self.repo.glob_update_entity(self, edited)
-
-    def delete_relation(self, fromeid, rtype, toeid):
-        """provide direct access to the repository method to delete a relation.
-
-        This is equivalent to the following rql query:
-
-          DELETE X rtype Y WHERE X eid fromeid, T eid toeid
-
-        without read security check but also all the burden of rql execution.
-        You may use this in hooks when you know both eids of the relation you
-        want to delete.
-        """
-        with self.security_enabled(False, False):
-            if self.vreg.schema[rtype].inlined:
-                entity = self.entity_from_eid(fromeid)
-                entity.cw_attr_cache[rtype] = None
-                self.repo.glob_update_entity(self, entity, set((rtype,)))
-            else:
-                self.repo.glob_delete_relation(self, fromeid, rtype, toeid)
+    add_relation = cnx_meth('add_relation')
+    add_relations = cnx_meth('add_relations')
+    delete_relation = cnx_meth('delete_relation')

     # relations cache handling #################################################

-    def update_rel_cache_add(self, subject, rtype, object, symmetric=False):
-        self._update_entity_rel_cache_add(subject, rtype, 'subject', object)
-        if symmetric:
-            self._update_entity_rel_cache_add(object, rtype, 'subject', subject)
-        else:
-            self._update_entity_rel_cache_add(object, rtype, 'object', subject)
-
-    def update_rel_cache_del(self, subject, rtype, object, symmetric=False):
-        self._update_entity_rel_cache_del(subject, rtype, 'subject', object)
-        if symmetric:
-            self._update_entity_rel_cache_del(object, rtype, 'object', object)
-        else:
-            self._update_entity_rel_cache_del(object, rtype, 'object', subject)
-
-    def _update_entity_rel_cache_add(self, eid, rtype, role, targeteid):
-        try:
-            entity = self.entity_cache(eid)
-        except KeyError:
-            return
-        rcache = entity.cw_relation_cached(rtype, role)
-        if rcache is not None:
-            rset, entities = rcache
-            rset = rset.copy()
-            entities = list(entities)
-            rset.rows.append([targeteid])
-            if not isinstance(rset.description, list): # else description not set
-                rset.description = list(rset.description)
-            rset.description.append([self.describe(targeteid)[0]])
-            targetentity = self.entity_from_eid(targeteid)
-            if targetentity.cw_rset is None:
-                targetentity.cw_rset = rset
-                targetentity.cw_row = rset.rowcount
-                targetentity.cw_col = 0
-            rset.rowcount += 1
-            entities.append(targetentity)
-            entity._cw_related_cache['%s_%s' % (rtype, role)] = (
-                rset, tuple(entities))
-
-    def _update_entity_rel_cache_del(self, eid, rtype, role, targeteid):
-        try:
-            entity = self.entity_cache(eid)
-        except KeyError:
-            return
-        rcache = entity.cw_relation_cached(rtype, role)
-        if rcache is not None:
-            rset, entities = rcache
-            for idx, row in enumerate(rset.rows):
-                if row[0] == targeteid:
-                    break
-            else:
-                # this may occur if the cache has been filled by a hook
-                # after the database update
-                self.debug('cache inconsistency for %s %s %s %s', eid, rtype,
-                           role, targeteid)
-                return
-            rset = rset.copy()
-            entities = list(entities)
-            del rset.rows[idx]
-            if isinstance(rset.description, list): # else description not set
-                del rset.description[idx]
-            del entities[idx]
-            rset.rowcount -= 1
-            entity._cw_related_cache['%s_%s' % (rtype, role)] = (
-                rset, tuple(entities))
+    update_rel_cache_add = cnx_meth('update_rel_cache_add')
+    update_rel_cache_del = cnx_meth('update_rel_cache_del')

     # resource accessors ######################################################

-    system_sql = tx_meth('system_sql')
-    deleted_in_transaction = tx_meth('deleted_in_transaction')
-    added_in_transaction = tx_meth('added_in_transaction')
-    rtype_eids_rdef = tx_meth('rtype_eids_rdef')
+    system_sql = cnx_meth('system_sql')
+    deleted_in_transaction = cnx_meth('deleted_in_transaction')
+    added_in_transaction = cnx_meth('added_in_transaction')
+    rtype_eids_rdef = cnx_meth('rtype_eids_rdef')

     # security control #########################################################
-
+    @deprecated('[3.19] use a Connection object instead')
     def security_enabled(self, read=None, write=None):
-        return _security_enabled(self, read=read, write=write)
+        return _session_security_enabled(self, read=read, write=write)

-    read_security = tx_attr('read_security', writable=True)
-    write_security = tx_attr('write_security', writable=True)
-    running_dbapi_query = tx_attr('running_dbapi_query')
+    read_security = cnx_attr('read_security', writable=True)
+    write_security = cnx_attr('write_security', writable=True)
+    running_dbapi_query = cnx_attr('running_dbapi_query')

     # hooks activation control #################################################
     # all hooks should be activated during normal execution

+    @deprecated('[3.19] use a Connection object instead')
     def allow_all_hooks_but(self, *categories):
-        return _hooks_control(self, HOOKS_ALLOW_ALL, *categories)
+        return _session_hooks_control(self, HOOKS_ALLOW_ALL, *categories)

+    @deprecated('[3.19] use a Connection object instead')
     def deny_all_hooks_but(self, *categories):
-        return _hooks_control(self, HOOKS_DENY_ALL, *categories)
-
-    hooks_mode = tx_attr('hooks_mode')
+        return _session_hooks_control(self, HOOKS_DENY_ALL, *categories)

-    disabled_hook_categories = tx_attr('disabled_hook_cats')
-    enabled_hook_categories = tx_attr('enabled_hook_cats')
-    disable_hook_categories = tx_meth('disable_hook_categories')
-    enable_hook_categories = tx_meth('enable_hook_categories')
-    is_hook_category_activated = tx_meth('is_hook_category_activated')
-    is_hook_activated = tx_meth('is_hook_activated')
+    hooks_mode = cnx_attr('hooks_mode')
+
+    disabled_hook_categories = cnx_attr('disabled_hook_cats')
+    enabled_hook_categories = cnx_attr('enabled_hook_cats')
+    disable_hook_categories = cnx_meth('disable_hook_categories')
+    enable_hook_categories = cnx_meth('enable_hook_categories')
+    is_hook_category_activated = cnx_meth('is_hook_category_activated')
+    is_hook_activated = cnx_meth('is_hook_activated')
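A short usage sketch of the hooks control API whose forwards are defined above; in 3.19 the context managers are used on a Connection, while pre-3.19 code called the same methods on the session (the category and RQL below are placeholders)::

    # run a write with every hook category disabled except 'metadata'
    with cnx.deny_all_hooks_but('metadata'):
        cnx.execute('DELETE CWGroup G WHERE G name "temp"')
    cnx.commit()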
     # connection management ###################################################

+    @deprecated('[3.19] use a Connection object instead')
     def keep_cnxset_mode(self, mode):
         """set `mode`, e.g. how the session will keep its connections set:
@@ -1099,16 +1524,17 @@
         else: # mode == 'write'
             self.default_mode = 'read'

-    mode = tx_attr('mode', writable=True)
-    commit_state = tx_attr('commit_state', writable=True)
+    mode = cnx_attr('mode', writable=True)
+    commit_state = cnx_attr('commit_state', writable=True)

     @property
+    @deprecated('[3.19] use a Connection object instead')
     def cnxset(self):
         """connections set, set according to transaction mode for each query"""
         if self._closed:
             self.free_cnxset(True)
             raise SessionClosedError('try to access connections set on a closed session %s' % self.id)
-        return self._tx.cnxset
+        return self._cnx.cnxset

     def set_cnxset(self):
         """the session needs a connections set to execute some queries"""
@@ -1116,20 +1542,28 @@
         if self._closed:
             self.free_cnxset(True)
             raise SessionClosedError('try to set connections set on a closed session %s' % self.id)
-        return self._tx.set_cnxset()
-    free_cnxset = tx_meth('free_cnxset')
+        return self._cnx.set_cnxset()
+    free_cnxset = cnx_meth('free_cnxset')
+    ensure_cnx_set = cnx_attr('ensure_cnx_set')

     def _touch(self):
         """update latest session usage timestamp and reset mode to read"""
-        self.timestamp = time()
-        self.local_perm_cache.clear() # XXX simply move in tx.data, no?
+        self._timestamp.touch()
+
+    local_perm_cache = cnx_attr('local_perm_cache')
+    @local_perm_cache.setter
+    def local_perm_cache(self, value):
+        # base class assigns an empty dict :-(
+        assert value == {}
+        pass

     # shared data handling ###################################################

+    @deprecated('[3.19] use session or transaction data')
     def get_shared_data(self, key, default=None, pop=False, txdata=False):
         """return value associated to `key` in session data"""
         if txdata:
-            data = self._tx.data
+            return self._cnx.get_shared_data(key, default, pop, txdata=True)
         else:
             data = self.data
         if pop:
@@ -1137,50 +1571,43 @@
         else:
             return data.get(key, default)

+    @deprecated('[3.19] use session or transaction data')
     def set_shared_data(self, key, value, txdata=False):
         """set value associated to `key` in session data"""
         if txdata:
-            self._tx.data[key] = value
+            return self._cnx.set_shared_data(key, value, txdata=True)
         else:
             self.data[key] = value

     # server-side service call #################################################

-    def call_service(self, regid, async=False, **kwargs):
-        return self.repo._call_service_with_session(self, regid, async,
-                                                    **kwargs)
-
+    call_service = cnx_meth('call_service')

     # request interface #######################################################

     @property
+    @deprecated('[3.19] use a Connection object instead')
     def cursor(self):
         """return a rql cursor"""
         return self

-    set_entity_cache = tx_meth('set_entity_cache')
-    entity_cache = tx_meth('entity_cache')
-    cache_entities = tx_meth('cached_entities')
-    drop_entity_cache = tx_meth('drop_entity_cache')
+    set_entity_cache = cnx_meth('set_entity_cache')
+    entity_cache = cnx_meth('entity_cache')
+    cache_entities = cnx_meth('cached_entities')
+    drop_entity_cache = cnx_meth('drop_entity_cache')

-    def from_controller(self):
-        """return the id (string) of the controller issuing the request (no
-        sense here, always return 'view')
-        """
-        return 'view'
-
-    source_defs = tx_meth('source_defs')
-    describe = tx_meth('describe')
-    source_from_eid = tx_meth('source_from_eid')
+    source_defs = cnx_meth('source_defs')
+    entity_metas = cnx_meth('entity_metas')
+    describe = cnx_meth('describe') # XXX deprecated in 3.19
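The deprecated shared-data helpers above now merely forward; the underlying dictionaries may be used directly, as in this minimal sketch (keys and values are placeholders)::

    session.data['import-queue'] = []           # session-scoped shared data
    cnx.transaction_data['seen-eids'] = set()   # cleared when the transaction ends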
-    def execute(self, rql, kwargs=None, build_descr=True):
+    @deprecated('[3.19] use a Connection object instead')
+    def execute(self, *args, **kwargs):
         """db-api like method directly linked to the querier execute method.

         See :meth:`cubicweb.dbapi.Cursor.execute` documentation.
         """
-        self.timestamp = time() # update timestamp
-        rset = self._execute(self, rql, kwargs, build_descr)
+        rset = self._cnx.execute(*args, **kwargs)
         rset.req = self
         return rset

@@ -1190,150 +1617,39 @@
         by _touch
         """
         try:
-            tx = self.__threaddata.tx
+            cnx = self.__threaddata.cnx
         except AttributeError:
             pass
         else:
             if free_cnxset:
-                self.free_cnxset()
-                if tx.ctx_count == 0:
-                    self._clear_thread_storage(tx)
+                cnx._free_cnxset()
+                if cnx.ctx_count == 0:
+                    self._close_cnx(cnx)
                 else:
-                    self._clear_tx_storage(tx)
+                    cnx.clear()
             else:
-                self._clear_tx_storage(tx)
+                cnx.clear()

-    def _clear_thread_storage(self, tx):
-        self._txs.pop(tx.transactionid, None)
-        try:
-            del self.__threaddata.tx
-        except AttributeError:
-            pass
-
-    def _clear_tx_storage(self, tx):
-        tx.clear()
-        tx._rewriter = RQLRewriter(self)
-
+    @deprecated('[3.19] use a Connection object instead')
     def commit(self, free_cnxset=True, reset_pool=None):
         """commit the current session's transaction"""
-        if reset_pool is not None:
-            warn('[3.13] use free_cnxset argument instead for reset_pool',
-                 DeprecationWarning, stacklevel=2)
-            free_cnxset = reset_pool
-        if self.cnxset is None:
-            assert not self.pending_operations
-            self._clear_thread_data()
-            self._touch()
-            self.debug('commit session %s done (no db activity)', self.id)
-            return
-        cstate = self.commit_state
+        cstate = self._cnx.commit_state
         if cstate == 'uncommitable':
             raise QueryError('transaction must be rolled back')
-        if cstate is not None:
-            return
-        # on rollback, an operation should have the following state
-        # information:
-        # - processed by the precommit/commit event or not
-        # - if processed, is it the failed operation
-        debug = server.DEBUG & server.DBG_OPS
         try:
-            # by default, operations are executed with security turned off
-            with self.security_enabled(False, False):
-                processed = []
-                self.commit_state = 'precommit'
-                if debug:
-                    print self.commit_state, '*' * 20
-                try:
-                    while self.pending_operations:
-                        operation = self.pending_operations.pop(0)
-                        operation.processed = 'precommit'
-                        processed.append(operation)
-                        if debug:
-                            print operation
-                        operation.handle_event('precommit_event')
-                    self.pending_operations[:] = processed
-                    self.debug('precommit session %s done', self.id)
-                except BaseException:
-                    # save exception context, it may be cluttered below by
-                    # exception in revert_* event
-                    exc_info = sys.exc_info()
-                    # if error on [pre]commit:
-                    #
-                    # * set .failed = True on the operation causing the failure
-                    # * call revert_event on processed operations
-                    # * call rollback_event on *all* operations
-                    #
-                    # that seems more natural than not calling rollback_event
-                    # for processed operations, and allows generic rollback
-                    # instead of having to implement rollback, revertprecommit
-                    # and revertcommit, which will be enough in most cases.
-                    operation.failed = True
-                    if debug:
-                        print self.commit_state, '*' * 20
-                    for operation in reversed(processed):
-                        if debug:
-                            print operation
-                        try:
-                            operation.handle_event('revertprecommit_event')
-                        except BaseException:
-                            self.critical('error while reverting precommit',
-                                          exc_info=True)
-                    # XXX use slice notation since self.pending_operations is a
-                    # read-only property.
-                    self.pending_operations[:] = processed + self.pending_operations
-                    self.rollback(free_cnxset)
-                    raise exc_info[0], exc_info[1], exc_info[2]
-                self.cnxset.commit()
-                self.commit_state = 'postcommit'
-                if debug:
-                    print self.commit_state, '*' * 20
-                while self.pending_operations:
-                    operation = self.pending_operations.pop(0)
-                    if debug:
-                        print operation
-                    operation.processed = 'postcommit'
-                    try:
-                        operation.handle_event('postcommit_event')
-                    except BaseException:
-                        self.critical('error while postcommit',
-                                      exc_info=sys.exc_info())
-                self.debug('postcommit session %s done', self.id)
-                return self.transaction_uuid(set=False)
+            return self._cnx.commit(free_cnxset, reset_pool)
         finally:
-            self._touch()
-            if free_cnxset:
-                self.free_cnxset(ignoremode=True)
             self._clear_thread_data(free_cnxset)

-    def rollback(self, free_cnxset=True, reset_pool=None):
+    @deprecated('[3.19] use a Connection object instead')
+    def rollback(self, *args, **kwargs):
         """rollback the current session's transaction"""
-        if reset_pool is not None:
-            warn('[3.13] use free_cnxset argument instead for reset_pool',
-                 DeprecationWarning, stacklevel=2)
-            free_cnxset = reset_pool
-        # don't use self.cnxset, rollback may be called with _closed == True
-        cnxset = self._tx.cnxset
-        if cnxset is None:
-            self._clear_thread_data()
-            self._touch()
-            self.debug('rollback session %s done (no db activity)', self.id)
-            return
+        return self._rollback(*args, **kwargs)
+
+    def _rollback(self, free_cnxset=True, **kwargs):
         try:
-            # by default, operations are executed with security turned off
-            with self.security_enabled(False, False):
-                while self.pending_operations:
-                    try:
-                        operation = self.pending_operations.pop(0)
-                        operation.handle_event('rollback_event')
-                    except BaseException:
-                        self.critical('rollback error', exc_info=sys.exc_info())
-                        continue
-                cnxset.rollback()
-                self.debug('rollback for session %s done', self.id)
+            return self._cnx.rollback(free_cnxset, **kwargs)
         finally:
-            self._touch()
-            if free_cnxset:
-                self.free_cnxset(ignoremode=True)
             self._clear_thread_data(free_cnxset)

     def close(self):
@@ -1342,63 +1658,67 @@
         with self._lock:
             self._closed = True
         tracker.close()
-        self.rollback()
-        self.debug('waiting for open transaction of session: %s', self)
+        if self._cnx._session_handled:
+            self._rollback()
+        self.debug('waiting for open connections of session: %s', self)
         timeout = 10
         pendings = tracker.wait(timeout)
         if pendings:
-            self.error('%i transaction still alive after 10 seconds, will close '
+            self.error('%i connections still alive after 10 seconds, will close '
                        'session anyway', len(pendings))
-            for txid in pendings:
-                tx = self._txs.get(txid)
-                if tx is not None:
-                    # drop tx.cnxset
+            for cnxid in pendings:
+                cnx = self._cnxs.get(cnxid)
+                if cnx is not None:
+                    # drop cnx.cnxset
                     with tracker:
                         try:
-                            cnxset = tx.cnxset
+                            cnxset = cnx.cnxset
                             if cnxset is None:
                                 continue
-                            tx.cnxset = None
+                            cnx.cnxset = None
                         except RuntimeError:
                             msg = 'issue while force free of cnxset in %s'
-                            self.error(msg, tx)
+                            self.error(msg, cnx)
                     # cnxset.reconnect() does a hard reset of the cnxset
                    # to force it to be freed
                    cnxset.reconnect()
                    self.repo._free_cnxset(cnxset)
        del self.__threaddata
-        del self._txs
+        del self._cnxs

     @property
     def closed(self):
-        return not hasattr(self, '_txs')
+        return not hasattr(self, '_cnxs')

     # transaction data/operations management ##################################

-    transaction_data = tx_attr('data')
-    pending_operations = tx_attr('pending_operations')
-    pruned_hooks_cache = tx_attr('pruned_hooks_cache')
-    add_operation = tx_meth('add_operation')
+    transaction_data = cnx_attr('transaction_data')
+    pending_operations = cnx_attr('pending_operations')
+    pruned_hooks_cache = cnx_attr('pruned_hooks_cache')
+    add_operation = cnx_meth('add_operation')

     # undo support ############################################################

-    ertype_supports_undo = tx_meth('ertype_supports_undo')
-    transaction_inc_action_counter = tx_meth('transaction_inc_action_counter')
-
-    def transaction_uuid(self, set=True):
-        try:
-            return self._tx.transaction_uuid(set=set)
-        except KeyError:
-            self._tx.data['tx_uuid'] = uuid = uuid4().hex
-            self.repo.system_source.start_undoable_transaction(self, uuid)
-            return uuid
+    ertype_supports_undo = cnx_meth('ertype_supports_undo')
+    transaction_inc_action_counter = cnx_meth('transaction_inc_action_counter')
+    transaction_uuid = cnx_meth('transaction_uuid')

     # querier helpers #########################################################

-    rql_rewriter = tx_attr('_rewriter')
+    rql_rewriter = cnx_attr('_rewriter')

     # deprecated ###############################################################

+    @property
+    def anonymous_session(self):
+        # XXX for now, anonymous_user only exists in webconfig (and testconfig).
+        # It will only be present inside all-in-one instances.
+        # There are plans to move it down to the global config.
+        if not hasattr(self.repo.config, 'anonymous_user'):
+            # not a web or test config, no anonymous user
+            return False
+        return self.user.login == self.repo.config.anonymous_user()[0]
+
     @deprecated('[3.13] use getattr(session.rtype_eids_rdef(rtype, eidfrom, eidto), prop)')
     def schema_rproperty(self, rtype, eidfrom, eidto, rprop):
         return getattr(self.rtype_eids_rdef(rtype, eidfrom, eidto), rprop)

@@ -1435,10 +1755,6 @@
         super(InternalSession, self).__init__(InternalManager(), repo, cnxprops,
                                               _id='internal')
         self.user._cw = self # XXX remove when "vreg = user._cw.vreg" hack in entity.py is gone
-        if not safe:
-            self.disable_hook_categories('integrity')
-            self.disable_hook_categories('security')
-            self._tx.ctx_count += 1

     def __enter__(self):
         return self
@@ -1452,7 +1768,7 @@
         if self.repo.shutting_down:
             self.free_cnxset(True)
             raise ShuttingDown('repository is shutting down')
-        return self._tx.cnxset
+        return self._cnx.cnxset


 class InternalManager(object):
     """a manager user with all access rights used mainly for bootstrapping the
     repository or creating regular users according to repository content
     """
-    def __init__(self):
+    def __init__(self, lang='en'):
         self.eid = -1
         self.login = u'__internal_manager__'
         self.properties = {}
+        self.groups = set(['managers'])
+        self.lang = lang

     def matching_groups(self, groups):
         return 1

@@ -1476,7 +1794,7 @@
     def property_value(self, key):
         if key == 'ui.language':
-            return 'en'
+            return self.lang
         return None

     def prefered_language(self, language=None):

@@ -1501,3 +1819,4 @@
 from logging import getLogger
 from cubicweb import set_log_methods
 set_log_methods(Session, getLogger('cubicweb.session'))
+set_log_methods(Connection, getLogger('cubicweb.session'))

diff -r 84738d495ffd -r 793377697c81 server/sources/__init__.py
--- a/server/sources/__init__.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/server/sources/__init__.py	Wed Sep 24 18:04:30 2014 +0200
@@ -61,32 +61,9 @@
     # return true so it can be used as assertion (and so be killed by python -O)
     return True
-class TimedCache(dict):
-    def __init__(self, ttl):
-        # time to live in seconds
-        if ttl <= 0:
-            raise ValueError('TimedCache initialized with a ttl of %ss' % ttl.seconds)
-        self.ttl = timedelta(seconds=ttl)
-
-    def __setitem__(self, key, value):
-        dict.__setitem__(self, key, (datetime.utcnow(), value))
-
-    def __getitem__(self, key):
-        return dict.__getitem__(self, key)[1]
-
-    def clear_expired(self):
-        now_ = datetime.utcnow()
-        ttl = self.ttl
-        for key, (timestamp, value) in self.items():
-            if now_ - timestamp > ttl:
-                del self[key]
-

 class AbstractSource(object):
     """an abstract class for sources"""

-    # does the source copy data into the system source, or is it a *true* source
-    # (i.e. entities are not stored physically here)
-    copy_based_source = False

     # boolean telling if modification hooks should be called when something is
     # modified in this source
@@ -108,10 +85,6 @@
     # a reference to the instance's schema (may differ from the source's schema)
     schema = None

-    # multi-sources planning control
-    dont_cross_relations = ()
-    cross_relations = ()

     # force deactivation (configuration error for instance)
     disabled = False

@@ -259,29 +232,15 @@
         """open and return a connection to the source"""
         raise NotImplementedError(self)

-    def check_connection(self, cnx):
-        """Check connection validity, return None if the connection is still
-        valid else a new connection (called when the connections set using the
-        given connection is being attached to a session). Do nothing by default.
-        """
-        pass
-
     def close_source_connections(self):
         for cnxset in self.repo.cnxsets:
-            cnxset._cursors.pop(self.uri, None)
-            cnxset.source_cnxs[self.uri][1].close()
+            cnxset.cu = None
+            cnxset.cnx.close()

     def open_source_connections(self):
         for cnxset in self.repo.cnxsets:
-            cnxset.source_cnxs[self.uri] = (self, self.get_connection())
-
-    def cnxset_freed(self, cnx):
-        """the connections set holding the given connection is being reset
-        from its current attached session.
-
-        do nothing by default
-        """
-        pass
+            cnxset.cnx = self.get_connection()
+            cnxset.cu = cnxset.cnx.cursor()

     # cache handling ###########################################################
@@ -333,23 +292,7 @@
             return wsupport
         return True

-    def may_cross_relation(self, rtype):
-        """return True if the relation may be crossed among sources. Rules are:
-
-        * if this source supports the relation, it can't be crossed unless
-          explicitly specified in .cross_relations
-
-        * if this source doesn't support the relation, it can be crossed unless
-          explicitly specified in .dont_cross_relations
-        """
-        # XXX find a way to have relation such as state_of in dont cross
-        # relation (eg composite relation without both end type available?
-        # card 1 relation? ...)
-        if self.support_relation(rtype):
-            return rtype in self.cross_relations
-        return rtype not in self.dont_cross_relations
-
-    def before_entity_insertion(self, session, lid, etype, eid, sourceparams):
+    def before_entity_insertion(self, cnx, lid, etype, eid, sourceparams):
         """called by the repository when an eid has been attributed for an
         entity stored here but the entity has not been inserted in the system
         table yet.

        This method must return an Entity instance representation of this
        entity.
        """
-        entity = self.repo.vreg['etypes'].etype_class(etype)(session)
+        entity = self.repo.vreg['etypes'].etype_class(etype)(cnx)
         entity.eid = eid
         entity.cw_edited = EditedEntity(entity)
         return entity

-    def after_entity_insertion(self, session, lid, entity, sourceparams):
+    def after_entity_insertion(self, cnx, lid, entity, sourceparams):
         """called by the repository after an entity stored here has been
         inserted in the system table.
         """
@@ -403,7 +346,7 @@

     # user authentication api ##################################################

-    def authenticate(self, session, login, **kwargs):
+    def authenticate(self, cnx, login, **kwargs):
         """if the source supports the CWUser entity type, it should implement
         this method, which should return the CWUser eid for the given
         login/password if this account is defined in this source and a valid
         login / password is
@@ -413,7 +356,7 @@

     # RQL query api ############################################################

-    def syntax_tree_search(self, session, union,
+    def syntax_tree_search(self, cnx, union,
                            args=None, cachekey=None, varmap=None, debug=0):
         """return result from this source for a rql query (actually from a rql
         syntax tree and a solution dictionary mapping each used variable to a
         possible type). If cachekey is given, the query necessary to fetch the
         results (but not the results themselves) may be cached using this key.
         """
         raise NotImplementedError(self)

-    def flying_insert(self, table, session, union, args=None, varmap=None):
-        """similar as .syntax_tree_search, but inserts data in the temporary
-        table (on-the-fly if possible, eg for the system source whose the given
-        cursor come from). If not possible, inserts all data by calling
-        .executemany().
-        """
-        res = self.syntax_tree_search(session, union, args, varmap=varmap)
-        session.cnxset.source('system').manual_insert(res, table, session)

     # write modification api ###################################################
     # read-only sources don't have to implement methods below
@@ -487,22 +421,6 @@
         """mark entity as being modified, fulltext reindex if needed"""
         raise NotImplementedError(self)

-    def delete_info_multi(self, session, entities, uri):
-        """delete system information on deletion of a list of entities with the
-        same etype and belonging to the same source
-        """
-        raise NotImplementedError(self)
-
-    def modified_entities(self, session, etypes, mtime):
-        """return a 2-uple:
-        * list of (etype, eid) of entities of the given types which have been
-          modified since the given timestamp (actually entities whose full text
-          index content has changed)
-        * list of (etype, eid) of entities of the given types which have been
-          deleted since the given timestamp
-        """
-        raise NotImplementedError(self)
-
     def index_entity(self, session, entity):
         """create an operation to [re]index textual content of the given entity
         on commit
         """
@@ -525,90 +443,18 @@
         """execute the query and return its result"""
         raise NotImplementedError(self)

-    def temp_table_def(self, selection, solution, table, basemap):
-        raise NotImplementedError(self)
-
     def create_index(self, session, table, column, unique=False):
         raise NotImplementedError(self)

     def drop_index(self, session, table, column, unique=False):
         raise NotImplementedError(self)

-    def create_temp_table(self, session, table, schema):
-        raise NotImplementedError(self)
-
-    def clean_temp_data(self, session, temptables):
-        """remove temporary data, usually associated to temporary tables"""
-        pass
-
-    @deprecated('[3.13] use repo.eid2extid(source, eid, session)')
-    def eid2extid(self, eid, session=None):
-        return self.repo.eid2extid(self, eid, session)

     @deprecated('[3.13] use extid2eid(source, value, etype, session, **kwargs)')
-    def extid2eid(self, value, etype, session=None, **kwargs):
+    def extid2eid(self, value, etype, session, **kwargs):
         return self.repo.extid2eid(self, value, etype, session, **kwargs)


-class TrFunc(object):
-    """lower, upper"""
-    def __init__(self, trname, index, attrname=None):
-        self._tr = trname.lower()
-        self.index = index
-        self.attrname = attrname
-
-    def apply(self, resdict):
-        value = resdict.get(self.attrname)
-        if value is not None:
-            return getattr(value, self._tr)()
-        return None
-
-
-class GlobTrFunc(TrFunc):
-    """count, sum, max, min, avg"""
-    funcs = {
-        'count': len,
-        'sum': sum,
-        'max': max,
-        'min': min,
-        # XXX avg
-        }
-    def apply(self, result):
-        """have to 'groupby' manually. For instance, if we 'count' for index 1:
-        >>> self.apply([(1, 2), (3, 4), (1, 5)])
-        [(1, 7), (3, 4)]
-        """
-        keys, values = [], {}
-        for row in result:
-            key = tuple(v for i, v in enumerate(row) if i != self.index)
-            value = row[self.index]
-            try:
-                values[key].append(value)
-            except KeyError:
-                keys.append(key)
-                values[key] = [value]
-        result = []
-        trfunc = self.funcs[self._tr]
-        for key in keys:
-            row = list(key)
-            row.insert(self.index, trfunc(values[key]))
-            result.append(row)
-        return result
-
-
-class ConnectionWrapper(object):
-    def __init__(self, cnx=None):
-        self.cnx = cnx
-    def commit(self):
-        pass
-    def rollback(self):
-        pass
-    def cursor(self):
-        return None # no actual cursor support
-    def close(self):
-        if hasattr(self.cnx, 'close'):
-            self.cnx.close()

 from cubicweb.server import SOURCE_TYPES

diff -r 84738d495ffd -r 793377697c81 server/sources/datafeed.py
--- a/server/sources/datafeed.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/server/sources/datafeed.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2010-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2010-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -35,7 +35,6 @@

 class DataFeedSource(AbstractSource):
-    copy_based_source = True
     use_cwuri_as_url = True

     options = (
@@ -114,18 +113,18 @@
         self.parser_id = source_entity.parser
         self.load_mapping(source_entity._cw)

-    def _get_parser(self, session, **kwargs):
+    def _get_parser(self, cnx, **kwargs):
         return self.repo.vreg['parsers'].select(
-            self.parser_id, session, source=self, **kwargs)
+            self.parser_id, cnx, source=self, **kwargs)

-    def load_mapping(self, session):
+    def load_mapping(self, cnx):
         self.mapping = {}
         self.mapping_idx = {}
         try:
-            parser = self._get_parser(session)
+            parser = self._get_parser(cnx)
         except (RegistryNotFound, ObjectNotFound):
             return # no parser yet, don't go further
-        self._load_mapping(session, parser=parser)
+        self._load_mapping(cnx, parser=parser)

     def add_schema_config(self, schemacfg, checkonly=False, parser=None):
         """added CWSourceSchemaConfig, modify mapping accordingly"""
@@ -144,67 +143,64 @@
             return False
         return datetime.utcnow() < (self.latest_retrieval + self.synchro_interval)

-    def update_latest_retrieval(self, session):
+    def update_latest_retrieval(self, cnx):
         self.latest_retrieval = datetime.utcnow()
-        session.set_cnxset()
-        session.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s',
-                        {'x': self.eid, 'date': self.latest_retrieval})
-        session.commit()
+        cnx.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s',
+                    {'x': self.eid, 'date': self.latest_retrieval})
+        cnx.commit()

-    def acquire_synchronization_lock(self, session):
+    def acquire_synchronization_lock(self, cnx):
         # XXX race condition until WHERE of SET queries is executed using
         # 'SELECT FOR UPDATE'
         now = datetime.utcnow()
-        session.set_cnxset()
-        if not session.execute(
+        if not cnx.execute(
             'SET X in_synchronization %(now)s WHERE X eid %(x)s, '
             'X in_synchronization NULL OR X in_synchronization < %(maxdt)s',
             {'x': self.eid, 'now': now, 'maxdt': now - self.max_lock_lifetime}):
             self.error('concurrent synchronization detected, skip pull')
-            session.commit()
+            cnx.commit()
             return False
-        session.commit()
+        cnx.commit()
         return True

-    def release_synchronization_lock(self, session):
-        session.set_cnxset()
-        session.execute('SET X in_synchronization NULL WHERE X eid %(x)s',
-                        {'x': self.eid})
-        session.commit()
+    def release_synchronization_lock(self, cnx):
+        cnx.execute('SET X in_synchronization NULL WHERE X eid %(x)s',
+                    {'x': self.eid})
+        cnx.commit()

-    def pull_data(self, session, force=False, raise_on_error=False):
+    def pull_data(self, cnx, force=False, raise_on_error=False):
         """Launch synchronization of the source if needed.

         This method is responsible for handling commit/rollback on the given
-        session.
+        connection.
         """
         if not force and self.fresh():
             return {}
-        if not self.acquire_synchronization_lock(session):
+        if not self.acquire_synchronization_lock(cnx):
             return {}
         try:
-            with session.transaction(free_cnxset=False):
-                return self._pull_data(session, force, raise_on_error)
+            return self._pull_data(cnx, force, raise_on_error)
         finally:
-            self.release_synchronization_lock(session)
+            cnx.rollback() # rollback first in case there is some dirty
+                           # transaction remaining
+            self.release_synchronization_lock(cnx)

-    def _pull_data(self, session, force=False, raise_on_error=False):
-        importlog = self.init_import_log(session)
-        myuris = self.source_cwuris(session)
-        parser = self._get_parser(session, sourceuris=myuris, import_log=importlog)
+    def _pull_data(self, cnx, force=False, raise_on_error=False):
+        importlog = self.init_import_log(cnx)
+        myuris = self.source_cwuris(cnx)
+        parser = self._get_parser(cnx, sourceuris=myuris, import_log=importlog)
         if self.process_urls(parser, self.urls, raise_on_error):
             self.warning("some error occurred, don't attempt to delete entities")
         else:
-            parser.handle_deletion(self.config, session, myuris)
-        self.update_latest_retrieval(session)
+            parser.handle_deletion(self.config, cnx, myuris)
+        self.update_latest_retrieval(cnx)
         stats = parser.stats
         if stats.get('created'):
             importlog.record_info('added %s entities' % len(stats['created']))
         if stats.get('updated'):
             importlog.record_info('updated %s entities' % len(stats['updated']))
-        session.set_cnxset()
-        importlog.write_log(session, end_timestamp=self.latest_retrieval)
-        session.commit()
+        importlog.write_log(cnx, end_timestamp=self.latest_retrieval)
+        cnx.commit()
         return stats

     def process_urls(self, parser, urls, raise_on_error=False):
@@ -229,7 +225,7 @@
                 error = True
         return error

-    def before_entity_insertion(self, session, lid, etype, eid, sourceparams):
+    def before_entity_insertion(self, cnx, lid, etype, eid, sourceparams):
         """called by the repository when an eid has been attributed for an
         entity stored here but the entity has not been inserted in the system
         table yet.
@@ -238,40 +234,40 @@
         entity.
         """
         entity = super(DataFeedSource, self).before_entity_insertion(
-            session, lid, etype, eid, sourceparams)
+            cnx, lid, etype, eid, sourceparams)
         entity.cw_edited['cwuri'] = lid.decode('utf-8')
         entity.cw_edited.set_defaults()
         sourceparams['parser'].before_entity_copy(entity, sourceparams)
         return entity
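For orientation, driving a pull with the new connection-based signature might look as follows. This is a hypothetical sketch: the source name 'myfeed' is an example, and `repo.internal_cnx()` is assumed to be the repoapi way to obtain an internal connection; neither is defined by this patch::

    source = repo.sources_by_uri['myfeed']   # a configured datafeed source
    with repo.internal_cnx() as cnx:         # assumed repoapi helper
        stats = source.pull_data(cnx, force=True)
    print '%s created, %s updated' % (len(stats.get('created', ())),
                                      len(stats.get('updated', ())))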
""" - relations = preprocess_inlined_relations(session, entity) - if session.is_hook_category_activated('integrity'): + relations = preprocess_inlined_relations(cnx, entity) + if cnx.is_hook_category_activated('integrity'): entity.cw_edited.check(creation=True) - self.repo.system_source.add_entity(session, entity) + self.repo.system_source.add_entity(cnx, entity) entity.cw_edited.saved = entity._cw_is_saved = True sourceparams['parser'].after_entity_copy(entity, sourceparams) # call hooks for inlined relations call_hooks = self.repo.hm.call_hooks if self.should_call_hooks: for attr, value in relations: - call_hooks('before_add_relation', session, + call_hooks('before_add_relation', cnx, eidfrom=entity.eid, rtype=attr, eidto=value) - call_hooks('after_add_relation', session, + call_hooks('after_add_relation', cnx, eidfrom=entity.eid, rtype=attr, eidto=value) - def source_cwuris(self, session): + def source_cwuris(self, cnx): sql = ('SELECT extid, eid, type FROM entities, cw_source_relation ' 'WHERE entities.eid=cw_source_relation.eid_from ' 'AND cw_source_relation.eid_to=%s' % self.eid) return dict((b64decode(uri), (eid, type)) - for uri, eid, type in session.system_sql(sql).fetchall()) + for uri, eid, type in cnx.system_sql(sql).fetchall()) - def init_import_log(self, session, **kwargs): - dataimport = session.create_entity('CWDataImport', cw_import_of=self, + def init_import_log(self, cnx, **kwargs): + dataimport = cnx.create_entity('CWDataImport', cw_import_of=self, start_timestamp=datetime.utcnow(), **kwargs) dataimport.init() @@ -281,8 +277,8 @@ class DataFeedParser(AppObject): __registry__ = 'parsers' - def __init__(self, session, source, sourceuris=None, import_log=None, **kwargs): - super(DataFeedParser, self).__init__(session, **kwargs) + def __init__(self, cnx, source, sourceuris=None, import_log=None, **kwargs): + super(DataFeedParser, self).__init__(cnx, **kwargs) self.source = source self.sourceuris = sourceuris self.import_log = import_log @@ -309,21 +305,20 @@ """return an entity for the given uri. May return None if it should be skipped """ - session = self._cw + cnx = self._cw # if cwsource is specified and repository has a source with the same # name, call extid2eid on that source so entity will be properly seen as # coming from this source source_uri = sourceparams.pop('cwsource', None) if source_uri is not None and source_uri != 'system': - source = session.repo.sources_by_uri.get(source_uri, self.source) + source = cnx.repo.sources_by_uri.get(source_uri, self.source) else: source = self.source sourceparams['parser'] = self if isinstance(uri, unicode): uri = uri.encode('utf-8') try: - eid = session.repo.extid2eid(source, str(uri), etype, session, - complete=False, commit=False, + eid = cnx.repo.extid2eid(source, str(uri), etype, cnx, sourceparams=sourceparams) except ValidationError as ex: # XXX use critical so they are seen during tests. 
             # XXX use critical so they are seen during tests. Should consider
@@ -338,14 +333,14 @@
             # Don't give etype to entity_from_eid so we get UnknownEid if the
             # entity has been removed
             try:
-                entity = session.entity_from_eid(-eid)
+                entity = cnx.entity_from_eid(-eid)
             except UnknownEid:
                 return None
             self.notify_updated(entity) # avoid later update from the source's data
             return entity
         if self.sourceuris is not None:
             self.sourceuris.pop(str(uri), None)
-        return session.entity_from_eid(eid, etype)
+        return cnx.entity_from_eid(eid, etype)

     def process(self, url, raise_on_error=False):
         """main callback: process the url"""
@@ -376,7 +371,7 @@
         """
         return True

-    def handle_deletion(self, config, session, myuris):
+    def handle_deletion(self, config, cnx, myuris):
         if config['delete-entities'] and myuris:
             byetype = {}
             for extid, (eid, etype) in myuris.iteritems():
@@ -384,10 +379,9 @@
                     byetype.setdefault(etype, []).append(str(eid))
             for etype, eids in byetype.iteritems():
                 self.warning('delete %s %s entities', len(eids), etype)
-                session.set_cnxset()
-                session.execute('DELETE %s X WHERE X eid IN (%s)'
-                                % (etype, ','.join(eids)))
-                session.commit()
+                cnx.execute('DELETE %s X WHERE X eid IN (%s)'
+                            % (etype, ','.join(eids)))
+                cnx.commit()

     def update_if_necessary(self, entity, attrs):
         entity.complete(tuple(attrs))
@@ -415,29 +409,20 @@
                 self.import_log.record_error(str(ex))
             return True
         error = False
-        # Check whether self._cw is a session or a connection
-        if getattr(self._cw, 'commit', None) is not None:
-            commit = self._cw.commit
-            set_cnxset = self._cw.set_cnxset
-            rollback = self._cw.rollback
-        else:
-            commit = self._cw.cnx.commit
-            set_cnxset = lambda: None
-            rollback = self._cw.cnx.rollback
+        commit = self._cw.commit
+        rollback = self._cw.rollback
         for args in parsed:
             try:
                 self.process_item(*args)
                 # commit+set_cnxset instead of commit(free_cnxset=False) to give
                 # others a chance to get our connections set
                 commit()
-                set_cnxset()
             except ValidationError as exc:
                 if raise_on_error:
                     raise
                 self.source.error('Skipping %s because of validation error %s'
                                   % (args, exc))
                 rollback()
-                set_cnxset()
                 error = True
         return error

diff -r 84738d495ffd -r 793377697c81 server/sources/extlite.py
--- a/server/sources/extlite.py	Wed Sep 24 17:35:59 2014 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,302 +0,0 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""provide an abstract class for external sources using a sqlite database helper -""" - -__docformat__ = "restructuredtext en" - - -from os.path import join, exists - -from cubicweb import server -from cubicweb.server.sqlutils import SQL_PREFIX, SQLAdapterMixIn, sqlexec -from cubicweb.server.sources import native, rql2sql -from cubicweb.server.sources import AbstractSource, dbg_st_search, dbg_results - -class ConnectionWrapper(object): - def __init__(self, source=None): - self.source = source - self._cnx = None - - def cursor(self): - if self._cnx is None: - self._cnx = self.source._sqlcnx - if server.DEBUG & server.DBG_SQL: - print 'sql cnx OPEN', self._cnx - return self._cnx.cursor() - - def commit(self): - if self._cnx is not None: - if server.DEBUG & (server.DBG_SQL | server.DBG_RQL): - print 'sql cnx COMMIT', self._cnx - self._cnx.commit() - - def rollback(self): - if self._cnx is not None: - if server.DEBUG & (server.DBG_SQL | server.DBG_RQL): - print 'sql cnx ROLLBACK', self._cnx - self._cnx.rollback() - - def close(self): - if self._cnx is not None: - if server.DEBUG & server.DBG_SQL: - print 'sql cnx CLOSE', self._cnx - self._cnx.close() - self._cnx = None - - -class SQLiteAbstractSource(AbstractSource): - """an abstract class for external sources using a sqlite database helper - """ - sqlgen_class = rql2sql.SQLGenerator - @classmethod - def set_nonsystem_types(cls): - # those entities are only in this source, we don't want them in the - # system source - for etype in cls.support_entities: - native.NONSYSTEM_ETYPES.add(etype) - for rtype in cls.support_relations: - native.NONSYSTEM_RELATIONS.add(rtype) - - options = ( - ('helper-db-path', - {'type' : 'string', - 'default': None, - 'help': 'path to the sqlite database file used to do queries on the \ -repository.', - 'level': 2, - }), - ) - - def __init__(self, repo, appschema, source_config, *args, **kwargs): - # the helper db is used to easy querying and will store everything but - # actual file content - dbpath = source_config.get('helper-db-path') - if dbpath is None: - dbpath = join(repo.config.appdatahome, - '%(uri)s.sqlite' % source_config) - self.dbpath = dbpath - self.sqladapter = SQLAdapterMixIn({'db-driver': 'sqlite', - 'db-name': dbpath}) - # those attributes have to be initialized before ancestor's __init__ - # which will call set_schema - self._need_sql_create = not exists(dbpath) - self._need_full_import = self._need_sql_create - AbstractSource.__init__(self, repo, appschema, source_config, - *args, **kwargs) - - def backup(self, backupfile, confirm): - """method called to create a backup of the source's data""" - self.close_source_connections() - try: - self.sqladapter.backup_to_file(backupfile, confirm) - finally: - self.open_source_connections() - - def restore(self, backupfile, confirm, drop): - """method called to restore a backup of source's data""" - self.close_source_connections() - try: - self.sqladapter.restore_from_file(backupfile, confirm, drop) - finally: - self.open_source_connections() - - @property - def _sqlcnx(self): - # XXX: sqlite connections can only be used in the same thread, so - # create a new one each time necessary. 
If it appears to be time - # consuming, find another way - return self.sqladapter.get_connection() - - def _is_schema_complete(self): - for etype in self.support_entities: - if not etype in self.schema: - self.warning('not ready to generate %s database, %s support missing from schema', - self.uri, etype) - return False - for rtype in self.support_relations: - if not rtype in self.schema: - self.warning('not ready to generate %s database, %s support missing from schema', - self.uri, rtype) - return False - return True - - def _create_database(self): - from yams.schema2sql import eschema2sql, rschema2sql - from cubicweb.toolsutils import restrict_perms_to_user - self.warning('initializing sqlite database for %s source' % self.uri) - cnx = self._sqlcnx - cu = cnx.cursor() - schema = self.schema - for etype in self.support_entities: - eschema = schema.eschema(etype) - createsqls = eschema2sql(self.sqladapter.dbhelper, eschema, - skip_relations=('data',), prefix=SQL_PREFIX) - sqlexec(createsqls, cu, withpb=False) - for rtype in self.support_relations: - rschema = schema.rschema(rtype) - if not rschema.inlined: - sqlexec(rschema2sql(rschema), cu, withpb=False) - cnx.commit() - cnx.close() - self._need_sql_create = False - if self.repo.config['uid']: - from logilab.common.shellutils import chown - # database file must be owned by the uid of the server process - self.warning('set %s as owner of the database file', - self.repo.config['uid']) - chown(self.dbpath, self.repo.config['uid']) - restrict_perms_to_user(self.dbpath, self.info) - - def set_schema(self, schema): - super(SQLiteAbstractSource, self).set_schema(schema) - if self._need_sql_create and self._is_schema_complete() and self.dbpath: - self._create_database() - self.rqlsqlgen = self.sqlgen_class(schema, self.sqladapter.dbhelper) - - def get_connection(self): - return ConnectionWrapper(self) - - def check_connection(self, cnx): - """check connection validity, return None if the connection is still valid - else a new connection (called when the connections set holding the given connection is - being attached to a session) - - always return the connection to reset eventually cached cursor - """ - return cnx - - def cnxset_freed(self, cnx): - """the connections set holding the given connection is being freed from its current - attached session: release the connection lock if the connection wrapper - has a connection set - """ - # reset _cnx to ensure next thread using cnx will get a new - # connection - cnx.close() - - def syntax_tree_search(self, session, union, args=None, cachekey=None, - varmap=None): - """return result from this source for a rql query (actually from a rql - syntax tree and a solution dictionary mapping each used variable to a - possible type). If cachekey is given, the query necessary to fetch the - results (but not the results themselves) may be cached using this key. - """ - if self._need_sql_create: - return [] - assert dbg_st_search(self.uri, union, varmap, args, cachekey) - sql, qargs, cbs = self.rqlsqlgen.generate(union, args) - args = self.sqladapter.merge_args(args, qargs) - cursor = self.doexec(session, sql, args) - results = self.sqladapter.process_result(cursor, cbs) - assert dbg_results(results) - return results - - def local_add_entity(self, session, entity): - """insert the entity in the local database. 
- - This is not provided as add_entity implementation since usually source - don't want to simply do this, so let raise NotImplementedError and the - source implementor may use this method if necessary - """ - attrs = self.sqladapter.preprocess_entity(entity) - sql = self.sqladapter.sqlgen.insert(SQL_PREFIX + str(entity.e_schema), attrs) - self.doexec(session, sql, attrs) - - def add_entity(self, session, entity): - """add a new entity to the source""" - raise NotImplementedError() - - def local_update_entity(self, session, entity, attrs=None): - """update an entity in the source - - This is not provided as update_entity implementation since usually - source don't want to simply do this, so let raise NotImplementedError - and the source implementor may use this method if necessary - """ - if attrs is None: - attrs = self.sqladapter.preprocess_entity(entity) - sql = self.sqladapter.sqlgen.update(SQL_PREFIX + str(entity.e_schema), - attrs, [SQL_PREFIX + 'eid']) - self.doexec(session, sql, attrs) - - def update_entity(self, session, entity): - """update an entity in the source""" - raise NotImplementedError() - - def delete_entity(self, session, entity): - """delete an entity from the source - - this is not deleting a file in the svn but deleting entities from the - source. Main usage is to delete repository content when a Repository - entity is deleted. - """ - attrs = {'cw_eid': entity.eid} - sql = self.sqladapter.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs) - self.doexec(session, sql, attrs) - - def local_add_relation(self, session, subject, rtype, object): - """add a relation to the source - - This is not provided as add_relation implementation since usually - source don't want to simply do this, so let raise NotImplementedError - and the source implementor may use this method if necessary - """ - attrs = {'eid_from': subject, 'eid_to': object} - sql = self.sqladapter.sqlgen.insert('%s_relation' % rtype, attrs) - self.doexec(session, sql, attrs) - - def add_relation(self, session, subject, rtype, object): - """add a relation to the source""" - raise NotImplementedError() - - def delete_relation(self, session, subject, rtype, object): - """delete a relation from the source""" - rschema = self.schema.rschema(rtype) - if rschema.inlined: - if subject in session.transaction_data.get('pendingeids', ()): - return - table = SQL_PREFIX + session.describe(subject)[0] - column = SQL_PREFIX + rtype - sql = 'UPDATE %s SET %s=NULL WHERE %seid=%%(eid)s' % (table, column, SQL_PREFIX) - attrs = {'eid' : subject} - else: - attrs = {'eid_from': subject, 'eid_to': object} - sql = self.sqladapter.sqlgen.delete('%s_relation' % rtype, attrs) - self.doexec(session, sql, attrs) - - def doexec(self, session, query, args=None): - """Execute a query. - it's a function just so that it shows up in profiling - """ - if server.DEBUG: - print 'exec', query, args - cursor = session.cnxset[self.uri] - try: - # str(query) to avoid error if it's a unicode string - cursor.execute(str(query), args) - except Exception as ex: - self.critical("sql: %r\n args: %s\ndbms message: %r", - query, args, ex.args[0]) - try: - session.cnxset.connection(self.uri).rollback() - self.critical('transaction has been rolled back') - except Exception: - pass - raise - return cursor diff -r 84738d495ffd -r 793377697c81 server/sources/ldapfeed.py --- a/server/sources/ldapfeed.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/sources/ldapfeed.py Wed Sep 24 18:04:30 2014 +0200 @@ -17,24 +17,39 @@ # with CubicWeb. If not, see . 
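The ConnectionWrapper deleted above is essentially a lazy-opening wrapper: the real sqlite connection is created on first cursor(), and commit/rollback/close are safe no-ops before that. A reduced sketch of that pattern (the `connect_fn` callable is a hypothetical stand-in for SQLAdapterMixIn.get_connection):

    import sqlite3

    class LazyConnection(object):
        """open the underlying DB connection on first use only"""
        def __init__(self, connect_fn):
            self._connect = connect_fn
            self._cnx = None

        def cursor(self):
            if self._cnx is None:       # first use: open for real
                self._cnx = self._connect()
            return self._cnx.cursor()

        def commit(self):
            if self._cnx is not None:   # nothing opened, nothing to commit
                self._cnx.commit()

        def rollback(self):
            if self._cnx is not None:
                self._cnx.rollback()

        def close(self):
            if self._cnx is not None:
                self._cnx.close()
                self._cnx = None        # a later use will reopen

    # usage: cnx = LazyConnection(lambda: sqlite3.connect(':memory:'))
    # cnx.commit() is a no-op until cnx.cursor() has been called once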
"""cubicweb ldap feed source""" +from __future__ import division # XXX why? + +from datetime import datetime + import ldap +from ldap.ldapobject import ReconnectLDAPObject from ldap.filter import filter_format +from ldapurl import LDAPUrl from logilab.common.configuration import merge_options +from cubicweb import ValidationError, AuthenticationError, Binary +from cubicweb.server import utils from cubicweb.server.sources import datafeed -from cubicweb.server import ldaputils, utils -from cubicweb import Binary _ = unicode # search scopes -ldapscope = {'BASE': ldap.SCOPE_BASE, - 'ONELEVEL': ldap.SCOPE_ONELEVEL, - 'SUBTREE': ldap.SCOPE_SUBTREE} +BASE = ldap.SCOPE_BASE +ONELEVEL = ldap.SCOPE_ONELEVEL +SUBTREE = ldap.SCOPE_SUBTREE +LDAP_SCOPES = {'BASE': ldap.SCOPE_BASE, + 'ONELEVEL': ldap.SCOPE_ONELEVEL, + 'SUBTREE': ldap.SCOPE_SUBTREE} -class LDAPFeedSource(ldaputils.LDAPSourceMixIn, - datafeed.DataFeedSource): +# map ldap protocol to their standard port +PROTO_PORT = {'ldap': 389, + 'ldaps': 636, + 'ldapi': None, + } + + +class LDAPFeedSource(datafeed.DataFeedSource): """LDAP feed source: unlike ldapuser source, this source is copy based and will import ldap content (beside passwords for authentication) into the system source. @@ -42,7 +57,79 @@ support_entities = {'CWUser': False} use_cwuri_as_url = False - options_group = ( + options = ( + ('auth-mode', + {'type' : 'choice', + 'default': 'simple', + 'choices': ('simple', 'cram_md5', 'digest_md5', 'gssapi'), + 'help': 'authentication mode used to authenticate user to the ldap.', + 'group': 'ldap-source', 'level': 3, + }), + ('auth-realm', + {'type' : 'string', + 'default': None, + 'help': 'realm to use when using gssapi/kerberos authentication.', + 'group': 'ldap-source', 'level': 3, + }), + + ('data-cnx-dn', + {'type' : 'string', + 'default': '', + 'help': 'user dn to use to open data connection to the ldap (eg used \ +to respond to rql queries). Leave empty for anonymous bind', + 'group': 'ldap-source', 'level': 1, + }), + ('data-cnx-password', + {'type' : 'string', + 'default': '', + 'help': 'password to use to open data connection to the ldap (eg used to respond to rql queries). Leave empty for anonymous bind.', + 'group': 'ldap-source', 'level': 1, + }), + + ('user-base-dn', + {'type' : 'string', + 'default': '', + 'help': 'base DN to lookup for users; disable user importation mechanism if unset', + 'group': 'ldap-source', 'level': 1, + }), + ('user-scope', + {'type' : 'choice', + 'default': 'ONELEVEL', + 'choices': ('BASE', 'ONELEVEL', 'SUBTREE'), + 'help': 'user search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")', + 'group': 'ldap-source', 'level': 1, + }), + ('user-classes', + {'type' : 'csv', + 'default': ('top', 'posixAccount'), + 'help': 'classes of user (with Active Directory, you want to say "user" here)', + 'group': 'ldap-source', 'level': 1, + }), + ('user-filter', + {'type': 'string', + 'default': '', + 'help': 'additional filters to be set in the ldap query to find valid users', + 'group': 'ldap-source', 'level': 2, + }), + ('user-login-attr', + {'type' : 'string', + 'default': 'uid', + 'help': 'attribute used as login on authentication (with Active Directory, you want to use "sAMAccountName" here)', + 'group': 'ldap-source', 'level': 1, + }), + ('user-default-group', + {'type' : 'csv', + 'default': ('users',), + 'help': 'name of a group in which ldap users will be by default. 
\ +You can set multiple groups by separating them by a comma.', + 'group': 'ldap-source', 'level': 1, + }), + ('user-attrs-map', + {'type' : 'named', + 'default': {'uid': 'login', 'gecos': 'email', 'userPassword': 'upassword'}, + 'help': 'map from ldap user attributes to cubicweb attributes (with Active Directory, you want to use sAMAccountName:login,mail:email,givenName:firstname,sn:surname)', + 'group': 'ldap-source', 'level': 1, + }), ('group-base-dn', {'type' : 'string', 'default': '', @@ -76,18 +163,33 @@ }), ) - options = merge_options(datafeed.DataFeedSource.options - + ldaputils.LDAPSourceMixIn.options - + options_group, + options = merge_options(datafeed.DataFeedSource.options + options, optgroup='ldap-source',) + _conn = None + def update_config(self, source_entity, typedconfig): """update configuration from source entity. `typedconfig` is config properly typed with defaults set """ super(LDAPFeedSource, self).update_config(source_entity, typedconfig) + self.authmode = typedconfig['auth-mode'] + self._authenticate = getattr(self, '_auth_%s' % self.authmode) + self.cnx_dn = typedconfig['data-cnx-dn'] + self.cnx_pwd = typedconfig['data-cnx-password'] + self.user_base_dn = str(typedconfig['user-base-dn']) + self.user_base_scope = globals()[typedconfig['user-scope']] + self.user_login_attr = typedconfig['user-login-attr'] + self.user_default_groups = typedconfig['user-default-group'] + self.user_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'} + self.user_attrs.update(typedconfig['user-attrs-map']) + self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.iteritems()) + self.base_filters = [filter_format('(%s=%s)', ('objectClass', o)) + for o in typedconfig['user-classes']] + if typedconfig['user-filter']: + self.base_filters.append(typedconfig['user-filter']) self.group_base_dn = str(typedconfig['group-base-dn']) - self.group_base_scope = ldapscope[typedconfig['group-scope']] + self.group_base_scope = LDAP_SCOPES[typedconfig['group-scope']] self.group_attrs = typedconfig['group-attrs-map'] self.group_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'} self.group_attrs.update(typedconfig['group-attrs-map']) @@ -96,11 +198,191 @@ for o in typedconfig['group-classes']] if typedconfig['group-filter']: self.group_base_filters.append(typedconfig['group-filter']) + self._conn = None + + def _entity_update(self, source_entity): + super(LDAPFeedSource, self)._entity_update(source_entity) + if self.urls: + if len(self.urls) > 1: + raise ValidationError(source_entity.eid, {'url': _('can only have one url')}) + try: + protocol, hostport = self.urls[0].split('://') + except ValueError: + raise ValidationError(source_entity.eid, {'url': _('badly formatted url')}) + if protocol not in PROTO_PORT: + raise ValidationError(source_entity.eid, {'url': _('unsupported protocol')}) + + def connection_info(self): + assert len(self.urls) == 1, self.urls + protocol, hostport = self.urls[0].split('://') + if protocol != 'ldapi' and not ':' in hostport: + hostport = '%s:%s' % (hostport, PROTO_PORT[protocol]) + return protocol, hostport + + def authenticate(self, cnx, login, password=None, **kwargs): + """return CWUser eid for the given login/password if this account is + defined in this source, else raise `AuthenticationError` + + two queries are needed since passwords are stored crypted, so we have + to fetch the salt first + """ + self.info('ldap authenticate %s', login) + if not password: + # On Windows + ADAM this would have succeeded (!!!) 
+ # You get Authenticated as: 'NT AUTHORITY\ANONYMOUS LOGON'. + # we really really don't want that + raise AuthenticationError() + searchfilter = [filter_format('(%s=%s)', (self.user_login_attr, login))] + searchfilter.extend(self.base_filters) + searchstr = '(&%s)' % ''.join(searchfilter) + # first search the user + try: + user = self._search(cnx, self.user_base_dn, + self.user_base_scope, searchstr)[0] + except (IndexError, ldap.SERVER_DOWN): + # no such user + raise AuthenticationError() + # check password by establishing a (unused) connection + try: + self._connect(user, password) + except ldap.LDAPError as ex: + # Something went wrong, most likely bad credentials + self.info('while trying to authenticate %s: %s', user, ex) + raise AuthenticationError() + except Exception: + self.error('while trying to authenticate %s', user, exc_info=True) + raise AuthenticationError() + eid = self.repo.extid2eid(self, user['dn'], 'CWUser', cnx, insert=False) + if eid < 0: + # user has been moved away from this source + raise AuthenticationError() + return eid + + def _connect(self, user=None, userpwd=None): + protocol, hostport = self.connection_info() + self.info('connecting %s://%s as %s', protocol, hostport, + user and user['dn'] or 'anonymous') + # don't require server certificate when using ldaps (will + # enable self signed certs) + ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER) + url = LDAPUrl(urlscheme=protocol, hostport=hostport) + conn = ReconnectLDAPObject(url.initializeUrl()) + # Set the protocol version - version 3 is preferred + try: + conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION3) + except ldap.LDAPError: # Invalid protocol version, fall back safely + conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION2) + # Deny auto-chasing of referrals to be safe, we handle them instead + # Required for AD + try: + conn.set_option(ldap.OPT_REFERRALS, 0) + except ldap.LDAPError: # Cannot set referrals, so do nothing + pass + #conn.set_option(ldap.OPT_NETWORK_TIMEOUT, conn_timeout) + #conn.timeout = op_timeout + # Now bind with the credentials given. Let exceptions propagate out. 
+ if user is None: + # XXX always use simple bind for data connection + if not self.cnx_dn: + conn.simple_bind_s(self.cnx_dn, self.cnx_pwd) + else: + self._authenticate(conn, {'dn': self.cnx_dn}, self.cnx_pwd) + else: + # user specified, we want to check user/password, no need to return + # the connection which will be thrown out + self._authenticate(conn, user, userpwd) + return conn + + def _auth_simple(self, conn, user, userpwd): + conn.simple_bind_s(user['dn'], userpwd) + + def _auth_cram_md5(self, conn, user, userpwd): + from ldap import sasl + auth_token = sasl.cram_md5(user['dn'], userpwd) + conn.sasl_interactive_bind_s('', auth_token) + + def _auth_digest_md5(self, conn, user, userpwd): + from ldap import sasl + auth_token = sasl.digest_md5(user['dn'], userpwd) + conn.sasl_interactive_bind_s('', auth_token) + + def _auth_gssapi(self, conn, user, userpwd): + # print XXX not proper sasl/gssapi + import kerberos + if not kerberos.checkPassword(user[self.user_login_attr], userpwd): + raise Exception('BAD login / mdp') + #from ldap import sasl + #conn.sasl_interactive_bind_s('', sasl.gssapi()) + + def _search(self, cnx, base, scope, + searchstr='(objectClass=*)', attrs=()): + """make an ldap query""" + self.debug('ldap search %s %s %s %s %s', self.uri, base, scope, + searchstr, list(attrs)) + if self._conn is None: + self._conn = self._connect() + ldapcnx = self._conn + try: + res = ldapcnx.search_s(base, scope, searchstr, attrs) + except ldap.PARTIAL_RESULTS: + res = ldapcnx.result(all=0)[1] + except ldap.NO_SUCH_OBJECT: + self.info('ldap NO SUCH OBJECT %s %s %s', base, scope, searchstr) + self._process_no_such_object(cnx, base) + return [] + # except ldap.REFERRAL as e: + # ldapcnx = self.handle_referral(e) + # try: + # res = ldapcnx.search_s(base, scope, searchstr, attrs) + # except ldap.PARTIAL_RESULTS: + # res_type, res = ldapcnx.result(all=0) + result = [] + for rec_dn, rec_dict in res: + # When used against Active Directory, "rec_dict" may not be + # be a dictionary in some cases (instead, it can be a list) + # + # An example of a useless "res" entry that can be ignored + # from AD is + # (None, ['ldap://ForestDnsZones.PORTAL.LOCAL/DC=ForestDnsZones,DC=PORTAL,DC=LOCAL']) + # This appears to be some sort of internal referral, but + # we can't handle it, so we need to skip over it. 
+ try: + items = rec_dict.iteritems() + except AttributeError: + continue + else: + itemdict = self._process_ldap_item(rec_dn, items) + result.append(itemdict) + self.debug('ldap built results %s', len(result)) + return result def _process_ldap_item(self, dn, iterator): - itemdict = super(LDAPFeedSource, self)._process_ldap_item(dn, iterator) + """Turn an ldap received item into a proper dict.""" + itemdict = {'dn': dn} + for key, value in iterator: + if self.user_attrs.get(key) == 'upassword': # XXx better password detection + value = value[0].encode('utf-8') + # we only support ldap_salted_sha1 for ldap sources, see: server/utils.py + if not value.startswith('{SSHA}'): + value = utils.crypt_password(value) + itemdict[key] = Binary(value) + elif self.user_attrs.get(key) == 'modification_date': + itemdict[key] = datetime.strptime(value[0], '%Y%m%d%H%M%SZ') + else: + value = [unicode(val, 'utf-8', 'replace') for val in value] + if len(value) == 1: + itemdict[key] = value = value[0] + else: + itemdict[key] = value # we expect memberUid to be a list of user ids, make sure of it member = self.group_rev_attrs['member'] if isinstance(itemdict.get(member), basestring): itemdict[member] = [itemdict[member]] return itemdict + + def _process_no_such_object(self, cnx, dn): + """Some search return NO_SUCH_OBJECT error, handle this (usually because + an object whose dn is no more existent in ldap as been encountered). + + Do nothing by default, let sub-classes handle that. + """ diff -r 84738d495ffd -r 793377697c81 server/sources/native.py --- a/server/sources/native.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/sources/native.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
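The new _process_ldap_item above turns python-ldap's (dn, {attribute: [byte values]}) records into flat dicts: passwords are hashed unless already {SSHA}-prefixed, modifyTimestamp becomes a datetime, and single-valued attributes are flattened to scalars. A simplified, self-contained sketch of the non-password part (names here are illustrative, not the source's API):

    from datetime import datetime

    def ldap_item_to_dict(dn, items):
        """items: iterable of (attribute, [byte values]) pairs"""
        itemdict = {'dn': dn}
        for key, values in items:
            if key == 'modifyTimestamp':
                # LDAP generalized time, e.g. b'20140924160430Z'
                stamp = values[0].decode('ascii')
                itemdict['modification_date'] = datetime.strptime(
                    stamp, '%Y%m%d%H%M%SZ')
            else:
                decoded = [v.decode('utf-8', 'replace') for v in values]
                # flatten single-valued attributes, keep lists otherwise
                itemdict[key] = decoded[0] if len(decoded) == 1 else decoded
        return itemdict

    # ldap_item_to_dict('uid=jdoe,ou=people,dc=example,dc=org',
    #                   [('uid', [b'jdoe']),
    #                    ('modifyTimestamp', [b'20140924160430Z'])])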
diff -r 84738d495ffd -r 793377697c81 server/sources/native.py
--- a/server/sources/native.py Wed Sep 24 17:35:59 2014 +0200
+++ b/server/sources/native.py Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -25,17 +25,13 @@
 """
 __docformat__ = "restructuredtext en"
 
-try:
-    from cPickle import loads, dumps
-    import cPickle as pickle
-except ImportError:
-    from pickle import loads, dumps
-    import pickle
+from cPickle import loads, dumps
+import cPickle as pickle
 from threading import Lock
 from datetime import datetime
 from base64 import b64decode, b64encode
 from contextlib import contextmanager
-from os.path import abspath, basename
+from os.path import basename
 import re
 import itertools
 import zipfile
@@ -51,7 +47,7 @@
 from yams.schema import role_name
 
 from cubicweb import (UnknownEid, AuthenticationError, ValidationError, Binary,
-                      UniqueTogetherError, QueryError, UndoTransactionException)
+                      UniqueTogetherError, UndoTransactionException)
 from cubicweb import transaction as tx, server, neg_role
 from cubicweb.utils import QueryCache
 from cubicweb.schema import VIRTUAL_RTYPES
@@ -94,37 +90,6 @@
         return self.cu.fetchone()
 
 
-def make_schema(selected, solution, table, typemap):
-    """return a sql schema to store RQL query result"""
-    sql = []
-    varmap = {}
-    for i, term in enumerate(selected):
-        name = 'C%s' % i
-        key = term.as_string()
-        varmap[key] = '%s.%s' % (table, name)
-        ttype = term.get_type(solution)
-        try:
-            sql.append('%s %s' % (name, typemap[ttype]))
-        except KeyError:
-            # assert not schema(ttype).final
-            sql.append('%s %s' % (name, typemap['Int']))
-    return ','.join(sql), varmap
-
-
-def _modified_sql(table, etypes):
-    # XXX protect against sql injection
-    if len(etypes) > 1:
-        restr = 'type IN (%s)' % ','.join("'%s'" % etype for etype in etypes)
-    else:
-        restr = "type='%s'" % etypes[0]
-    if table == 'entities':
-        attr = 'mtime'
-    else:
-        attr = 'dtime'
-    return 'SELECT type, eid FROM %s WHERE %s AND %s > %%(time)s' % (
-        table, restr, attr)
-
-
 def sql_or_clauses(sql, clauses):
     select, restr = sql.split(' WHERE ', 1)
     restrclauses = restr.split(' AND ')
@@ -137,6 +102,7 @@
         restr = '(%s)' % ' OR '.join(clauses)
     return '%s WHERE %s' % (select, restr)
 
+
 def rdef_table_column(rdef):
     """return table and column used to store the given relation definition in
     the database
@@ -144,6 +110,7 @@
     return (SQL_PREFIX + str(rdef.subject),
             SQL_PREFIX + str(rdef.rtype))
 
+
 def rdef_physical_info(dbhelper, rdef):
     """return backend type and a boolean flag if NULL values should be allowed
     for a given relation definition
@@ -181,34 +148,34 @@
                'rtype': rdef.rtype,
                'eid': tentity.eid})
 
-def _undo_rel_info(session, subj, rtype, obj):
+def _undo_rel_info(cnx, subj, rtype, obj):
     entities = []
     for role, eid in (('subject', subj), ('object', obj)):
         try:
-            entities.append(session.entity_from_eid(eid))
+            entities.append(cnx.entity_from_eid(eid))
         except UnknownEid:
-            raise _UndoException(session._(
+            raise _UndoException(cnx._(
                 "Can't restore relation %(rtype)s, %(role)s entity %(eid)s"
                 " doesn't exist anymore.")
-                                 % {'role': session._(role),
-                                    'rtype': session._(rtype),
+                                 % {'role': cnx._(role),
+                                    'rtype': cnx._(rtype),
                                     'eid': eid})
     sentity, oentity = entities
     try:
-        rschema = session.vreg.schema.rschema(rtype)
+        rschema = cnx.vreg.schema.rschema(rtype)
         rdef = rschema.rdefs[(sentity.cw_etype, oentity.cw_etype)]
     except KeyError:
-        raise _UndoException(session._(
+        raise _UndoException(cnx._(
             "Can't restore relation %(rtype)s between %(subj)s and "
            "%(obj)s, that relation does not exist anymore in the "
            "schema.")
-                             % {'rtype': session._(rtype),
+                             % {'rtype': cnx._(rtype),
                                 'subj': subj,
                                 'obj': obj})
     return sentity, oentity, rdef
 
-def _undo_has_later_transaction(session, eid):
-    return session.system_sql('''\
+def _undo_has_later_transaction(cnx, eid):
+    return cnx.system_sql('''\
 SELECT T.tx_uuid FROM transactions AS TREF, transactions AS T
 WHERE TREF.tx_uuid='%(txuuid)s' AND T.tx_uuid!='%(txuuid)s'
 AND T.tx_time>=TREF.tx_time
@@ -217,10 +184,85 @@
              OR EXISTS(SELECT 1 FROM tx_relation_actions as TRA
                        WHERE TRA.tx_uuid=T.tx_uuid AND (
                            TRA.eid_from=%(eid)s OR TRA.eid_to=%(eid)s))
-             )''' % {'txuuid': session.transaction_data['undoing_uuid'],
+             )''' % {'txuuid': cnx.transaction_data['undoing_uuid'],
                      'eid': eid}).fetchone()
 
+
+class DefaultEidGenerator(object):
+    __slots__ = ('source', 'cnx', 'lock')
+
+    def __init__(self, source):
+        self.source = source
+        self.cnx = None
+        self.lock = Lock()
+
+    def close(self):
+        if self.cnx:
+            self.cnx.close()
+        self.cnx = None
+
+    def create_eid(self, _cnx, count=1):
+        # lock needed to prevent 'Connection is busy with results for another
+        # command (0)' errors with SQLServer
+        assert count > 0
+        with self.lock:
+            return self._create_eid(count)
+
+    def _create_eid(self, count):
+        # internal function doing the eid creation without locking.
+        # needed for the recursive handling of disconnections (otherwise we
+        # deadlock on self.lock)
+        source = self.source
+        if self.cnx is None:
+            self.cnx = source.get_connection()
+        cnx = self.cnx
+        try:
+            cursor = cnx.cursor()
+            for sql in source.dbhelper.sqls_increment_numrange('entities_id_seq', count):
+                cursor.execute(sql)
+            eid = cursor.fetchone()[0]
+        except (source.OperationalError, source.InterfaceError):
+            # FIXME: better detection of disconnection pb
+            source.warning("trying to reconnect create eid connection")
+            self.cnx = None
+            return self._create_eid(count)
+        except source.DbapiError as exc:
+            # We get this one with pyodbc and SQL Server when connection was reset
+            if exc.args[0] == '08S01':
+                source.warning("trying to reconnect create eid connection")
+                self.cnx = None
+                return self._create_eid(count)
+            else:
+                raise
+        except Exception: # WTF?
+            cnx.rollback()
+            self.cnx = None
+            source.exception('create eid failed in an unforeseen way on SQL statement %s', sql)
+            raise
+        else:
+            cnx.commit()
+            return eid
+
+
+class SQLITEEidGenerator(object):
+    __slots__ = ('source', 'lock')
+
+    def __init__(self, source):
+        self.source = source
+        self.lock = Lock()
+
+    def close(self):
+        pass
+
+    def create_eid(self, cnx, count=1):
+        assert count > 0
+        source = self.source
+        with self.lock:
+            for sql in source.dbhelper.sqls_increment_numrange('entities_id_seq', count):
+                cursor = source.doexec(cnx, sql)
+            return cursor.fetchone()[0]
+
+
 class NativeSQLSource(SQLAdapterMixIn, AbstractSource):
     """adapter for source using the native cubicweb schema (see below)
     """
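DefaultEidGenerator above serializes eid allocation behind a lock and keeps one dedicated connection that it silently reopens when the backend drops it. A compact sketch of that strategy; `connect` and `increment_sql` are hypothetical stand-ins for source.get_connection and dbhelper.sqls_increment_numrange, and unlike the real class this version retries only once:

    from threading import Lock

    class EidAllocator(object):
        def __init__(self, connect, increment_sql):
            self.connect = connect              # () -> DB-API connection
            self.increment_sql = increment_sql  # (seq, count) -> [sql, ...]
            self.cnx = None
            self.lock = Lock()

        def create_eid(self, count=1):
            with self.lock:                     # serialize range allocation
                return self._create(count)

        def _create(self, count, _retry=True):
            if self.cnx is None:
                self.cnx = self.connect()       # (re)open lazily
            try:
                cursor = self.cnx.cursor()
                for sql in self.increment_sql('entities_id_seq', count):
                    cursor.execute(sql)
                eid = cursor.fetchone()[0]
            except Exception:
                # the real generator inspects the error class; this sketch
                # just reopens the connection and retries a single time
                self.cnx = None
                if not _retry:
                    raise
                return self._create(count, _retry=False)
            self.cnx.commit()
            return eid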
@@ -291,38 +333,14 @@
         self.do_fti = not repo.config['delay-full-text-indexation']
         # sql queries cache
         self._cache = QueryCache(repo.config['rql-cache-size'])
-        self._temp_table_data = {}
-        # we need a lock to protect eid attribution function (XXX, really?
-        # explain)
-        self._eid_cnx_lock = Lock()
-        self._eid_creation_cnx = None
         # (etype, attr) / storage mapping
         self._storages = {}
-        # entity types that may be used by other multi-sources instances
-        self.multisources_etypes = set(repo.config['multi-sources-etypes'])
-        # XXX no_sqlite_wrap trick since we've a sqlite locking pb when
-        # running unittest_multisources with the wrapping below
-        if self.dbdriver == 'sqlite' and \
-           not getattr(repo.config, 'no_sqlite_wrap', False):
-            from cubicweb.server.sources.extlite import ConnectionWrapper
-            self.dbhelper.dbname = abspath(self.dbhelper.dbname)
-            self.get_connection = lambda: ConnectionWrapper(self)
-            self.check_connection = lambda cnx: cnx
-            def cnxset_freed(cnx):
-                cnx.close()
-            self.cnxset_freed = cnxset_freed
+        self.binary_to_str = self.dbhelper.dbapi_module.binary_to_str
         if self.dbdriver == 'sqlite':
-            self._create_eid = None
-            self.create_eid = self._create_eid_sqlite
-        self.binary_to_str = self.dbhelper.dbapi_module.binary_to_str
-
-
-    @property
-    def _sqlcnx(self):
-        # XXX: sqlite connections can only be used in the same thread, so
-        # create a new one each time necessary. If it appears to be time
-        # consuming, find another way
-        return SQLAdapterMixIn.get_connection(self)
+            self.eid_generator = SQLITEEidGenerator(self)
+        else:
+            self.eid_generator = DefaultEidGenerator(self)
+        self.create_eid = self.eid_generator.create_eid
 
     def check_config(self, source_entity):
         """check configuration of source entity"""
@@ -346,19 +364,18 @@
         self._cache.pop('Any X WHERE X eid %s' % eid, None)
         self._cache.pop('Any %s' % eid, None)
 
-    def sqlexec(self, session, sql, args=None):
+    def sqlexec(self, cnx, sql, args=None):
         """execute the query and return its result"""
-        return self.process_result(self.doexec(session, sql, args))
+        return self.process_result(self.doexec(cnx, sql, args))
 
     def init_creating(self, cnxset=None):
         # check full text index availability
         if self.do_fti:
             if cnxset is None:
                 _cnxset = self.repo._get_cnxset()
-                _cnxset.cnxset_set()
             else:
                 _cnxset = cnxset
-            if not self.dbhelper.has_fti_table(_cnxset['system']):
+            if not self.dbhelper.has_fti_table(_cnxset.cu):
                 if not self.repo.config.creating:
                     self.critical('no text index table')
                 self.do_fti = False
@@ -419,9 +436,7 @@
         self.init_creating(source_entity._cw.cnxset)
 
     def shutdown(self):
-        if self._eid_creation_cnx:
-            self._eid_creation_cnx.close()
-            self._eid_creation_cnx = None
+        self.eid_generator.close()
 
     # XXX deprecates [un]map_attribute?
     def map_attribute(self, etype, attr, cb, sourcedb=True):
@@ -490,21 +505,18 @@
             # can't claim not supporting a relation
             return True #not rtype == 'content_for'
 
-    def may_cross_relation(self, rtype):
-        return True
-
-    def authenticate(self, session, login, **kwargs):
+    def authenticate(self, cnx, login, **kwargs):
         """return CWUser eid for the given login and other authentication
         information found in kwargs, else raise `AuthenticationError`
         """
         for authentifier in self.authentifiers:
             try:
-                return authentifier.authenticate(session, login, **kwargs)
+                return authentifier.authenticate(cnx, login, **kwargs)
             except AuthenticationError:
                 continue
         raise AuthenticationError()
 
-    def syntax_tree_search(self, session, union, args=None, cachekey=None,
+    def syntax_tree_search(self, cnx, union, args=None, cachekey=None,
                            varmap=None):
         """return result from this source for a rql query (actually from a rql
         syntax tree and a solution dictionary mapping each used
@@ -530,74 +542,28 @@
             args = self.merge_args(args, qargs)
             assert isinstance(sql, basestring), repr(sql)
         try:
-            cursor = self.doexec(session, sql, args)
+            cursor = self.doexec(cnx, sql, args)
         except (self.OperationalError, self.InterfaceError):
-            if session.mode == 'write':
+            if cnx.mode == 'write':
                 # do not attempt to reconnect if there has been some write
                 # during the transaction
                 raise
             # FIXME: better detection of disconnection pb
             self.warning("trying to reconnect")
-            session.cnxset.reconnect(self)
-            cursor = self.doexec(session, sql, args)
+            cnx.cnxset.reconnect()
+            cursor = self.doexec(cnx, sql, args)
         except self.DbapiError as exc:
             # We get this one with pyodbc and SQL Server when connection was reset
-            if exc.args[0] == '08S01' and session.mode != 'write':
+            if exc.args[0] == '08S01' and cnx.mode != 'write':
                 self.warning("trying to reconnect")
-                session.cnxset.reconnect(self)
-                cursor = self.doexec(session, sql, args)
+                cnx.cnxset.reconnect()
+                cursor = self.doexec(cnx, sql, args)
             else:
                 raise
-        results = self.process_result(cursor, cbs, session=session)
+        results = self.process_result(cursor, cbs, session=cnx)
         assert dbg_results(results)
         return results
 
-    def flying_insert(self, table, session, union, args=None, varmap=None):
-        """similar as .syntax_tree_search, but inserts data in the
-        temporary table (on-the-fly if possible, eg for the system
-        source whose the given cursor come from). If not possible,
-        inserts all data by calling .executemany().
-        """
-        assert dbg_st_search(
-            self.uri, union, varmap, args,
-            prefix='ON THE FLY temp data insertion into %s from' % table)
-        # generate sql queries if we are able to do so
-        sql, qargs, cbs = self._rql_sqlgen.generate(union, args, varmap)
-        query = 'INSERT INTO %s %s' % (table, sql.encode(self._dbencoding))
-        self.doexec(session, query, self.merge_args(args, qargs))
-
-    def manual_insert(self, results, table, session):
-        """insert given result into a temporary table on the system source"""
-        if server.DEBUG & server.DBG_RQL:
-            print '  manual insertion of', len(results), 'results into', table
-        if not results:
-            return
-        query_args = ['%%(%s)s' % i for i in xrange(len(results[0]))]
-        query = 'INSERT INTO %s VALUES(%s)' % (table, ','.join(query_args))
-        kwargs_list = []
-        for row in results:
-            kwargs = {}
-            row = tuple(row)
-            for index, cell in enumerate(row):
-                if isinstance(cell, Binary):
-                    cell = self._binary(cell.getvalue())
-                kwargs[str(index)] = cell
-            kwargs_list.append(kwargs)
-        self.doexecmany(session, query, kwargs_list)
-
-    def clean_temp_data(self, session, temptables):
-        """remove temporary data, usually associated to temporary tables"""
-        if temptables:
-            for table in temptables:
-                try:
-                    self.doexec(session,'DROP TABLE %s' % table)
-                except Exception:
-                    pass
-                try:
-                    del self._temp_table_data[table]
-                except KeyError:
-                    continue
-
     @contextmanager
     def _storage_handler(self, entity, event):
         # 1/ memorize values as they are before the storage is called.
@@ -629,60 +595,60 @@
         for entity, attr, value in restore_values:
             entity.cw_edited.edited_attribute(attr, value)
 
-    def add_entity(self, session, entity):
+    def add_entity(self, cnx, entity):
         """add a new entity to the source"""
         with self._storage_handler(entity, 'added'):
             attrs = self.preprocess_entity(entity)
             sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs)
-            self.doexec(session, sql, attrs)
-            if session.ertype_supports_undo(entity.cw_etype):
-                self._record_tx_action(session, 'tx_entity_actions', 'C',
+            self.doexec(cnx, sql, attrs)
+            if cnx.ertype_supports_undo(entity.cw_etype):
+                self._record_tx_action(cnx, 'tx_entity_actions', 'C',
                                        etype=entity.cw_etype, eid=entity.eid)
 
-    def update_entity(self, session, entity):
+    def update_entity(self, cnx, entity):
         """replace an entity in the source"""
         with self._storage_handler(entity, 'updated'):
             attrs = self.preprocess_entity(entity)
-            if session.ertype_supports_undo(entity.cw_etype):
-                changes = self._save_attrs(session, entity, attrs)
-                self._record_tx_action(session, 'tx_entity_actions', 'U',
+            if cnx.ertype_supports_undo(entity.cw_etype):
+                changes = self._save_attrs(cnx, entity, attrs)
+                self._record_tx_action(cnx, 'tx_entity_actions', 'U',
                                        etype=entity.cw_etype, eid=entity.eid,
                                        changes=self._binary(dumps(changes)))
             sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, attrs,
                                      ['cw_eid'])
-            self.doexec(session, sql, attrs)
+            self.doexec(cnx, sql, attrs)
 
-    def delete_entity(self, session, entity):
+    def delete_entity(self, cnx, entity):
         """delete an entity from the source"""
         with self._storage_handler(entity, 'deleted'):
-            if session.ertype_supports_undo(entity.cw_etype):
+            if cnx.ertype_supports_undo(entity.cw_etype):
                 attrs = [SQL_PREFIX + r.type
                          for r in entity.e_schema.subject_relations()
                          if (r.final or r.inlined) and not r in VIRTUAL_RTYPES]
-                changes = self._save_attrs(session, entity, attrs)
-                self._record_tx_action(session, 'tx_entity_actions', 'D',
+                changes = self._save_attrs(cnx, entity, attrs)
+                self._record_tx_action(cnx, 'tx_entity_actions', 'D',
                                        etype=entity.cw_etype, eid=entity.eid,
                                        changes=self._binary(dumps(changes)))
             attrs = {'cw_eid': entity.eid}
             sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
-            self.doexec(session, sql, attrs)
+            self.doexec(cnx, sql, attrs)
 
-    def add_relation(self, session, subject, rtype, object, inlined=False):
+    def add_relation(self, cnx, subject, rtype, object, inlined=False):
         """add a relation to the source"""
-        self._add_relations(session, rtype, [(subject, object)], inlined)
-        if session.ertype_supports_undo(rtype):
-            self._record_tx_action(session, 'tx_relation_actions', 'A',
+        self._add_relations(cnx, rtype, [(subject, object)], inlined)
+        if cnx.ertype_supports_undo(rtype):
+            self._record_tx_action(cnx, 'tx_relation_actions', 'A',
                                    eid_from=subject, rtype=rtype, eid_to=object)
 
-    def add_relations(self, session, rtype, subj_obj_list, inlined=False):
+    def add_relations(self, cnx, rtype, subj_obj_list, inlined=False):
         """add relations to the source"""
-        self._add_relations(session, rtype, subj_obj_list, inlined)
-        if session.ertype_supports_undo(rtype):
+        self._add_relations(cnx, rtype, subj_obj_list, inlined)
+        if cnx.ertype_supports_undo(rtype):
             for subject, object in subj_obj_list:
-                self._record_tx_action(session, 'tx_relation_actions', 'A',
+                self._record_tx_action(cnx, 'tx_relation_actions', 'A',
                                        eid_from=subject, rtype=rtype, eid_to=object)
 
-    def _add_relations(self, session, rtype, subj_obj_list, inlined=False):
+    def _add_relations(self, cnx, rtype, subj_obj_list, inlined=False):
         """add a relation to the source"""
         sql = []
         if inlined is False:
@@ -692,7 +658,7 @@
         else: # used by data import
             etypes = {}
             for subject, object in subj_obj_list:
-                etype = session.describe(subject)[0]
+                etype = cnx.entity_metas(subject)['type']
                 if etype in etypes:
                     etypes[etype].append((subject, object))
                 else:
@@ -704,20 +670,20 @@
                                          ['cw_eid']),
                            attrs))
         for statement, attrs in sql:
-            self.doexecmany(session, statement, attrs)
+            self.doexecmany(cnx, statement, attrs)
 
-    def delete_relation(self, session, subject, rtype, object):
+    def delete_relation(self, cnx, subject, rtype, object):
         """delete a relation from the source"""
         rschema = self.schema.rschema(rtype)
-        self._delete_relation(session, subject, rtype, object, rschema.inlined)
-        if session.ertype_supports_undo(rtype):
-            self._record_tx_action(session, 'tx_relation_actions', 'R',
+        self._delete_relation(cnx, subject, rtype, object, rschema.inlined)
+        if cnx.ertype_supports_undo(rtype):
+            self._record_tx_action(cnx, 'tx_relation_actions', 'R',
                                    eid_from=subject, rtype=rtype, eid_to=object)
 
-    def _delete_relation(self, session, subject, rtype, object, inlined=False):
+    def _delete_relation(self, cnx, subject, rtype, object, inlined=False):
         """delete a relation from the source"""
         if inlined:
-            table = SQL_PREFIX + session.describe(subject)[0]
+            table = SQL_PREFIX + cnx.entity_metas(subject)['type']
             column = SQL_PREFIX + rtype
             sql = 'UPDATE %s SET %s=NULL WHERE %seid=%%(eid)s' % (table, column,
                                                                   SQL_PREFIX)
@@ -725,16 +691,16 @@
         else:
             attrs = {'eid_from': subject, 'eid_to': object}
             sql = self.sqlgen.delete('%s_relation' % rtype, attrs)
-        self.doexec(session, sql, attrs)
+        self.doexec(cnx, sql, attrs)
 
-    def doexec(self, session, query, args=None, rollback=True):
+    def doexec(self, cnx, query, args=None, rollback=True):
         """Execute a query.
        It's a function just so that it shows up in profiling
        """
-        cursor = session.cnxset[self.uri]
+        cursor = cnx.cnxset.cu
         if server.DEBUG & server.DBG_SQL:
-            cnx = session.cnxset.connection(self.uri)
-            # getattr to get the actual connection if cnx is a ConnectionWrapper
+            cnx = cnx.cnxset.cnx
+            # getattr to get the actual connection if cnx is a CnxLoggingWrapper
             # instance
             print 'exec', query, args, getattr(cnx, '_cnx', cnx)
         try:
@@ -748,7 +714,7 @@
                           query, args, ex.args[0])
             if rollback:
                 try:
-                    session.cnxset.connection(self.uri).rollback()
+                    cnx.cnxset.rollback()
                     if self.repo.config.mode != 'test':
                         self.critical('transaction has been rolled back')
                 except Exception as ex:
@@ -759,24 +725,31 @@
                     # postgres, sqlserver
                    mo = re.search("unique_[a-z0-9]{32}", arg)
                    if mo is not None:
-                        raise UniqueTogetherError(session, cstrname=mo.group(0))
-                    # sqlite
+                        raise UniqueTogetherError(cnx, cstrname=mo.group(0))
+                    # old sqlite
                    mo = re.search('columns (.*) are not unique', arg)
                    if mo is not None: # sqlite in use
                        # we left chop the 'cw_' prefix of attribute names
                        rtypes = [c.strip()[3:] for c in mo.group(1).split(',')]
-                        raise UniqueTogetherError(session, rtypes=rtypes)
+                        raise UniqueTogetherError(cnx, rtypes=rtypes)
+                    # sqlite after http://www.sqlite.org/cgi/src/info/c80e229dd9c1230a
+                    if arg.startswith('UNIQUE constraint failed:'):
+                        # message looks like: "UNIQUE constraint failed: foo.cw_bar, foo.cw_baz"
+                        # so drop the prefix, split on comma, drop the tablenames, and drop "cw_"
+                        columns = arg.split(':', 1)[1].split(',')
+                        rtypes = [c.split('.', 1)[1].strip()[3:] for c in columns]
+                        raise UniqueTogetherError(cnx, rtypes=rtypes)
            raise
        return cursor
 
-    def doexecmany(self, session, query, args):
+    def doexecmany(self, cnx, query, args):
         """Execute a query.
        It's a function just so that it shows up in profiling
        """
        if server.DEBUG & server.DBG_SQL:
            print 'execmany', query, 'with', len(args), 'arguments'
-        cursor = session.cnxset[self.uri]
+        cursor = cnx.cnxset.cu
        try:
            # str(query) to avoid error if it's a unicode string
            cursor.executemany(str(query), args)
@@ -787,7 +760,7 @@
            self.critical("sql many: %r\n args: %s\ndbms message: %r",
                          query, args, ex.args[0])
            try:
-                session.cnxset.connection(self.uri).rollback()
+                cnx.cnxset.rollback()
                if self.repo.config.mode != 'test':
                    self.critical('transaction has been rolled back')
            except Exception:
@@ -796,7 +769,7 @@
 
     # short cut to methods requiring advanced db helper usage ##################
 
-    def update_rdef_column(self, session, rdef):
+    def update_rdef_column(self, cnx, rdef):
         """update physical column for a relation definition (final or inlined)
         """
         table, column = rdef_table_column(rdef)
@@ -805,12 +778,12 @@
             self.error("backend can't alter %s.%s to %s%s", table, column,
                        coltype, not allownull and 'NOT NULL' or '')
             return
-        self.dbhelper.change_col_type(LogCursor(session.cnxset[self.uri]),
+        self.dbhelper.change_col_type(LogCursor(cnx.cnxset.cu),
                                       table, column, coltype, allownull)
         self.info('altered %s.%s: now %s%s', table, column, coltype,
                   not allownull and 'NOT NULL' or '')
 
-    def update_rdef_null_allowed(self, session, rdef):
+    def update_rdef_null_allowed(self, cnx, rdef):
         """update NULL / NOT NULL of physical column for a relation definition
         (final or inlined)
         """
@@ -820,62 +793,62 @@
             return
         table, column = rdef_table_column(rdef)
         coltype, allownull = rdef_physical_info(self.dbhelper, rdef)
-        self.dbhelper.set_null_allowed(LogCursor(session.cnxset[self.uri]),
+        self.dbhelper.set_null_allowed(LogCursor(cnx.cnxset.cu),
                                        table, column, coltype, allownull)
 
-    def update_rdef_indexed(self, session, rdef):
+    def update_rdef_indexed(self, cnx, rdef):
         table, column = rdef_table_column(rdef)
         if rdef.indexed:
-            self.create_index(session, table, column)
+            self.create_index(cnx, table, column)
         else:
-            self.drop_index(session, table, column)
+            self.drop_index(cnx, table, column)
 
-    def update_rdef_unique(self, session, rdef):
+    def update_rdef_unique(self, cnx, rdef):
         table, column = rdef_table_column(rdef)
         if rdef.constraint_by_type('UniqueConstraint'):
-            self.create_index(session, table, column, unique=True)
+            self.create_index(cnx, table, column, unique=True)
         else:
-            self.drop_index(session, table, column, unique=True)
+            self.drop_index(cnx, table, column, unique=True)
 
-    def create_index(self, session, table, column, unique=False):
-        cursor = LogCursor(session.cnxset[self.uri])
+    def create_index(self, cnx, table, column, unique=False):
+        cursor = LogCursor(cnx.cnxset.cu)
         self.dbhelper.create_index(cursor, table, column, unique)
 
-    def drop_index(self, session, table, column, unique=False):
-        cursor = LogCursor(session.cnxset[self.uri])
+    def drop_index(self, cnx, table, column, unique=False):
+        cursor = LogCursor(cnx.cnxset.cu)
         self.dbhelper.drop_index(cursor, table, column, unique)
 
     # system source interface #################################################
 
-    def _eid_type_source(self, session, eid, sql, _retry=True):
+    def _eid_type_source(self, cnx, eid, sql, _retry=True):
         try:
-            res = self.doexec(session, sql).fetchone()
+            res = self.doexec(cnx, sql).fetchone()
             if res is not None:
                 return res
         except (self.OperationalError, self.InterfaceError):
-            if session.mode == 'read' and _retry:
+            if cnx.mode == 'read' and _retry:
                 self.warning("trying to reconnect (eid_type_source())")
-                session.cnxset.reconnect(self)
-                return self._eid_type_source(session, eid, sql, _retry=False)
+                cnx.cnxset.reconnect()
+                return self._eid_type_source(cnx, eid, sql, _retry=False)
         except Exception:
-            assert session.cnxset, 'session has no connections set'
+            assert cnx.cnxset, 'connection has no connections set'
             self.exception('failed to query entities table for eid %s', eid)
         raise UnknownEid(eid)
 
-    def eid_type_source(self, session, eid): # pylint: disable=E0202
+    def eid_type_source(self, cnx, eid): # pylint: disable=E0202
         """return a tuple (type, source, extid) for the entity with id <eid>"""
-        sql = 'SELECT type, source, extid, asource FROM entities WHERE eid=%s' % eid
-        res = self._eid_type_source(session, eid, sql)
+        sql = 'SELECT type, extid, asource FROM entities WHERE eid=%s' % eid
+        res = self._eid_type_source(cnx, eid, sql)
         if res[-2] is not None:
             if not isinstance(res, list):
                 res = list(res)
             res[-2] = b64decode(res[-2])
         return res
 
-    def eid_type_source_pre_131(self, session, eid):
+    def eid_type_source_pre_131(self, cnx, eid):
         """return a tuple (type, source, extid) for the entity with id <eid>"""
-        sql = 'SELECT type, source, extid FROM entities WHERE eid=%s' % eid
-        res = self._eid_type_source(session, eid, sql)
+        sql = 'SELECT type, extid FROM entities WHERE eid=%s' % eid
+        res = self._eid_type_source(cnx, eid, sql)
         if not isinstance(res, list):
             res = list(res)
         if res[-1] is not None:
@@ -883,13 +856,12 @@
             res.append(res[1])
         return res
 
-    def extid2eid(self, session, source_uri, extid):
+    def extid2eid(self, cnx, extid):
         """get eid from an external id.
         Return None if no record found."""
         assert isinstance(extid, str)
-        cursor = self.doexec(session,
-                             'SELECT eid FROM entities '
-                             'WHERE extid=%(x)s AND source=%(s)s',
-                             {'x': b64encode(extid), 's': source_uri})
+        cursor = self.doexec(cnx,
+                             'SELECT eid FROM entities WHERE extid=%(x)s',
+                             {'x': b64encode(extid)})
         # XXX testing rowcount causes strange bug with sqlite, results are there
         #     but rowcount is 0
         #if cursor.rowcount > 0:
         try:
             result = cursor.fetchone()
             if result:
                 return result[0]
         except Exception:
             pass
         return None
 
-    def make_temp_table_name(self, table):
-        return self.dbhelper.temporary_table_name(table)
-
-    def temp_table_def(self, selected, sol, table):
-        return make_schema(selected, sol, table, self.dbhelper.TYPE_MAPPING)
-
-    def create_temp_table(self, session, table, schema):
-        # we don't want on commit drop, this may cause problem when
-        # running with an ldap source, and table will be deleted manually any way
-        # on commit
-        sql = self.dbhelper.sql_temporary_table(table, schema, False)
-        self.doexec(session, sql)
-
-    def _create_eid_sqlite(self, session):
-        with self._eid_cnx_lock:
-            for sql in self.dbhelper.sqls_increment_sequence('entities_id_seq'):
-                cursor = self.doexec(session, sql)
-            return cursor.fetchone()[0]
-
-
-    def create_eid(self, session): # pylint: disable=E0202
-        # lock needed to prevent 'Connection is busy with results for another
-        # command (0)' errors with SQLServer
-        with self._eid_cnx_lock:
-            return self._create_eid() # pylint: disable=E1102
-
-    def _create_eid(self): # pylint: disable=E0202
-        # internal function doing the eid creation without locking.
-        # needed for the recursive handling of disconnections (otherwise we
-        # deadlock on self._eid_cnx_lock
-        if self._eid_creation_cnx is None:
-            self._eid_creation_cnx = self.get_connection()
-        cnx = self._eid_creation_cnx
-        try:
-            cursor = cnx.cursor()
-            for sql in self.dbhelper.sqls_increment_sequence('entities_id_seq'):
-                cursor.execute(sql)
-            eid = cursor.fetchone()[0]
-        except (self.OperationalError, self.InterfaceError):
-            # FIXME: better detection of deconnection pb
-            self.warning("trying to reconnect create eid connection")
-            self._eid_creation_cnx = None
-            return self._create_eid() # pylint: disable=E1102
-        except self.DbapiError as exc:
-            # We get this one with pyodbc and SQL Server when connection was reset
-            if exc.args[0] == '08S01':
-                self.warning("trying to reconnect create eid connection")
-                self._eid_creation_cnx = None
-                return self._create_eid() # pylint: disable=E1102
-            else:
-                raise
-        except Exception: # WTF?
-            cnx.rollback()
-            self._eid_creation_cnx = None
-            self.exception('create eid failed in an unforeseen way on SQL statement %s', sql)
-            raise
-        else:
-            cnx.commit()
-            return eid
-
-    def _handle_is_relation_sql(self, session, sql, attrs):
+    def _handle_is_relation_sql(self, cnx, sql, attrs):
         """ Handler for specific is_relation sql that may be
         overwritten in some stores"""
-        self.doexec(session, sql % attrs)
+        self.doexec(cnx, sql % attrs)
 
     _handle_insert_entity_sql = doexec
     _handle_is_instance_of_sql = _handle_source_relation_sql = _handle_is_relation_sql
 
-    def add_info(self, session, entity, source, extid, complete):
+    def add_info(self, cnx, entity, source, extid):
         """add type and source info for an eid into the system table"""
+        assert cnx.cnxset is not None
         # begin by inserting eid/type/source/extid into the entities table
         if extid is not None:
             assert isinstance(extid, str)
             extid = b64encode(extid)
-        uri = 'system' if source.copy_based_source else source.uri
         attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid,
-                 'source': uri, 'asource': source.uri, 'mtime': datetime.utcnow()}
-        self._handle_insert_entity_sql(session, self.sqlgen.insert('entities', attrs), attrs)
+                 'asource': source.uri}
+        self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs)
         # insert core relations: is, is_instance_of and cw_source
         try:
-            self._handle_is_relation_sql(session, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)',
-                                         (entity.eid, eschema_eid(session, entity.e_schema)))
+            self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)',
+                                         (entity.eid, eschema_eid(cnx, entity.e_schema)))
         except IndexError:
             # during schema serialization, skip
             pass
         else:
             for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
-                self._handle_is_relation_sql(session,
+                self._handle_is_relation_sql(cnx,
                                              'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)',
-                                             (entity.eid, eschema_eid(session, eschema)))
+                                             (entity.eid, eschema_eid(cnx, eschema)))
         if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10
-            self._handle_is_relation_sql(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
+            self._handle_is_relation_sql(cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
                                          (entity.eid, source.eid))
         # now we can update the full text index
         if self.do_fti and self.need_fti_indexation(entity.cw_etype):
-            if complete:
-                entity.complete(entity.e_schema.indexable_attributes())
-            self.index_entity(session, entity=entity)
+            self.index_entity(cnx, entity=entity)
 
-    def update_info(self, session, entity, need_fti_update):
+    def update_info(self, cnx, entity, need_fti_update):
         """mark entity as being modified, fulltext reindex if needed"""
         if self.do_fti and need_fti_update:
             # reindex the entity only if this query is updating at least
             # one indexable attribute
-            self.index_entity(session, entity=entity)
-        # update entities.mtime.
-        # XXX Only if entity.cw_etype in self.multisources_etypes?
-        attrs = {'eid': entity.eid, 'mtime': datetime.utcnow()}
-        self.doexec(session, self.sqlgen.update('entities', attrs, ['eid']), attrs)
+            self.index_entity(cnx, entity=entity)
 
-    def delete_info_multi(self, session, entities, uri):
+    def delete_info_multi(self, cnx, entities):
         """delete system information on deletion of a list of entities with the
         same etype and belonging to the same source
 
         * update the fti
         * remove record from the `entities` table
-        * transfer it to the `deleted_entities`
         """
-        self.fti_unindex_entities(session, entities)
+        self.fti_unindex_entities(cnx, entities)
         attrs = {'eid': '(%s)' % ','.join([str(_e.eid) for _e in entities])}
-        self.doexec(session, self.sqlgen.delete_many('entities', attrs), attrs)
-        if entities[0].__regid__ not in self.multisources_etypes:
-            return
-        attrs = {'type': entities[0].__regid__,
-                 'source': uri, 'dtime': datetime.utcnow()}
-        for entity in entities:
-            extid = entity.cw_metainformation()['extid']
-            if extid is not None:
-                assert isinstance(extid, str), type(extid)
-                extid = b64encode(extid)
-            attrs.update({'eid': entity.eid, 'extid': extid})
-            self.doexec(session, self.sqlgen.insert('deleted_entities', attrs), attrs)
-
-    def modified_entities(self, session, etypes, mtime):
-        """return a 2-uple:
-        * list of (etype, eid) of entities of the given types which have been
-          modified since the given timestamp (actually entities whose full text
-          index content has changed)
-        * list of (etype, eid) of entities of the given types which have been
-          deleted since the given timestamp
-        """
-        for etype in etypes:
-            if not etype in self.multisources_etypes:
-                self.error('%s not listed as a multi-sources entity types. '
-                           'Modify your configuration' % etype)
-                self.multisources_etypes.add(etype)
-        modsql = _modified_sql('entities', etypes)
-        cursor = self.doexec(session, modsql, {'time': mtime})
-        modentities = cursor.fetchall()
-        delsql = _modified_sql('deleted_entities', etypes)
-        cursor = self.doexec(session, delsql, {'time': mtime})
-        delentities = cursor.fetchall()
-        return modentities, delentities
+        self.doexec(cnx, self.sqlgen.delete_many('entities', attrs), attrs)
 
     # undo support #############################################################
 
-    def undoable_transactions(self, session, ueid=None, **actionfilters):
-        """See :class:`cubicweb.dbapi.Connection.undoable_transactions`"""
-        # force filtering to session's user if not a manager
-        if not session.user.is_in_group('managers'):
-            ueid = session.user.eid
+    def undoable_transactions(self, cnx, ueid=None, **actionfilters):
+        """See :class:`cubicweb.repoapi.ClientConnection.undoable_transactions`"""
+        # force filtering to connection's user if not a manager
+        if not cnx.user.is_in_group('managers'):
+            ueid = cnx.user.eid
         restr = {}
         if ueid is not None:
             restr['tx_user'] = ueid
@@ -1125,17 +998,18 @@
             restr.update(tearestr)
         # we want results ordered by transaction's time descendant
         sql += ' ORDER BY tx_time DESC'
-        cu = self.doexec(session, sql, restr)
-        # turn results into transaction objects
-        return [tx.Transaction(*args) for args in cu.fetchall()]
+        with cnx.ensure_cnx_set:
+            cu = self.doexec(cnx, sql, restr)
+            # turn results into transaction objects
+            return [tx.Transaction(*args) for args in cu.fetchall()]
 
-    def tx_info(self, session, txuuid):
-        """See :class:`cubicweb.dbapi.Connection.transaction_info`"""
-        return tx.Transaction(txuuid, *self._tx_info(session, txuuid))
+    def tx_info(self, cnx, txuuid):
+        """See :class:`cubicweb.repoapi.ClientConnection.transaction_info`"""
+        return tx.Transaction(txuuid, *self._tx_info(cnx, txuuid))
 
-    def tx_actions(self, session, txuuid, public):
-        """See :class:`cubicweb.dbapi.Connection.transaction_actions`"""
-        self._tx_info(session, txuuid)
+    def tx_actions(self, cnx, txuuid, public):
+        """See :class:`cubicweb.repoapi.ClientConnection.transaction_actions`"""
+        self._tx_info(cnx, txuuid)
         restr = {'tx_uuid': txuuid}
         if public:
             restr['txa_public'] = True
@@ -1143,54 +1017,54 @@
         sql = self.sqlgen.select('tx_entity_actions', restr,
                                  ('txa_action', 'txa_public', 'txa_order',
                                   'etype', 'eid', 'changes'))
-        cu = self.doexec(session, sql, restr)
+        cu = self.doexec(cnx, sql, restr)
         actions = [tx.EntityAction(a,p,o,et,e,c and loads(self.binary_to_str(c)))
                    for a,p,o,et,e,c in cu.fetchall()]
         sql = self.sqlgen.select('tx_relation_actions', restr,
                                  ('txa_action', 'txa_public', 'txa_order',
                                   'rtype', 'eid_from', 'eid_to'))
-        cu = self.doexec(session, sql, restr)
+        cu = self.doexec(cnx, sql, restr)
         actions += [tx.RelationAction(*args) for args in cu.fetchall()]
         return sorted(actions, key=lambda x: x.order)
 
-    def undo_transaction(self, session, txuuid):
-        """See :class:`cubicweb.dbapi.Connection.undo_transaction`
+    def undo_transaction(self, cnx, txuuid):
+        """See :class:`cubicweb.repoapi.ClientConnection.undo_transaction`
 
         important note: while undoing a transaction, only hooks in the
         'integrity', 'activeintegrity' and 'undo' categories are called.
         """
        # set mode so connections set isn't released subsequently until commit/rollback
-        session.mode = 'write'
+        cnx.mode = 'write'
        errors = []
-        session.transaction_data['undoing_uuid'] = txuuid
-        with session.deny_all_hooks_but('integrity', 'activeintegrity', 'undo'):
-            with session.security_enabled(read=False):
-                for action in reversed(self.tx_actions(session, txuuid, False)):
+        cnx.transaction_data['undoing_uuid'] = txuuid
+        with cnx.deny_all_hooks_but('integrity', 'activeintegrity', 'undo'):
+            with cnx.security_enabled(read=False):
+                for action in reversed(self.tx_actions(cnx, txuuid, False)):
                    undomethod = getattr(self, '_undo_%s' % action.action.lower())
-                    errors += undomethod(session, action)
+                    errors += undomethod(cnx, action)
        # remove the transactions record
-        self.doexec(session,
+        self.doexec(cnx,
                    "DELETE FROM transactions WHERE tx_uuid='%s'" % txuuid)
        if errors:
            raise UndoTransactionException(txuuid, errors)
        else:
            return
 
-    def start_undoable_transaction(self, session, uuid):
-        """session callback to insert a transaction record in the transactions
+    def start_undoable_transaction(self, cnx, uuid):
+        """connection callback to insert a transaction record in the transactions
         table when some undoable transaction is started
         """
-        ueid = session.user.eid
+        ueid = cnx.user.eid
         attrs = {'tx_uuid': uuid, 'tx_user': ueid, 'tx_time': datetime.utcnow()}
-        self.doexec(session, self.sqlgen.insert('transactions', attrs), attrs)
+        self.doexec(cnx, self.sqlgen.insert('transactions', attrs), attrs)
 
-    def _save_attrs(self, session, entity, attrs):
+    def _save_attrs(self, cnx, entity, attrs):
         """return a pickleable dictionary containing current values for given
         attributes of the entity
         """
         restr = {'cw_eid': entity.eid}
         sql = self.sqlgen.select(SQL_PREFIX + entity.cw_etype, restr, attrs)
-        cu = self.doexec(session, sql, restr)
+        cu = self.doexec(cnx, sql, restr)
         values = dict(zip(attrs, cu.fetchone()))
         # ensure backend specific binaries are converted back to string
         eschema = entity.e_schema
@@ -1205,36 +1079,38 @@
                 values[column] = self.binary_to_str(value)
         return values
 
-    def _record_tx_action(self, session, table, action, **kwargs):
+    def _record_tx_action(self, cnx, table, action, **kwargs):
         """record a transaction action in the given table (either
         'tx_entity_actions' or 'tx_relation_action')
         """
-        kwargs['tx_uuid'] = session.transaction_uuid()
+        kwargs['tx_uuid'] = cnx.transaction_uuid()
         kwargs['txa_action'] = action
-        kwargs['txa_order'] = session.transaction_inc_action_counter()
-        kwargs['txa_public'] = session.running_dbapi_query
-        self.doexec(session, self.sqlgen.insert(table, kwargs), kwargs)
+        kwargs['txa_order'] = cnx.transaction_inc_action_counter()
+        kwargs['txa_public'] = cnx.running_dbapi_query
+        self.doexec(cnx, self.sqlgen.insert(table, kwargs), kwargs)
 
-    def _tx_info(self, session, txuuid):
+    def _tx_info(self, cnx, txuuid):
         """return transaction's time and user of the transaction with the given uuid.
 
         raise `NoSuchTransaction` if there is no such transaction or if the
-        session's user isn't allowed to see it.
+        connection's user isn't allowed to see it.
         """
-        restr = {'tx_uuid': txuuid}
-        sql = self.sqlgen.select('transactions', restr, ('tx_time', 'tx_user'))
-        cu = self.doexec(session, sql, restr)
-        try:
-            time, ueid = cu.fetchone()
-        except TypeError:
-            raise tx.NoSuchTransaction(txuuid)
-        if not (session.user.is_in_group('managers')
-                or session.user.eid == ueid):
-            raise tx.NoSuchTransaction(txuuid)
-        return time, ueid
+        with cnx.ensure_cnx_set:
+            restr = {'tx_uuid': txuuid}
+            sql = self.sqlgen.select('transactions', restr,
+                                     ('tx_time', 'tx_user'))
+            cu = self.doexec(cnx, sql, restr)
+            try:
+                time, ueid = cu.fetchone()
+            except TypeError:
+                raise tx.NoSuchTransaction(txuuid)
+            if not (cnx.user.is_in_group('managers')
+                    or cnx.user.eid == ueid):
+                raise tx.NoSuchTransaction(txuuid)
            return time, ueid
 
     def _reedit_entity(self, entity, changes, err):
-        session = entity._cw
+        cnx = entity._cw
         eid = entity.eid
         entity.cw_edited = edited = EditedEntity(entity)
         # check for schema changes, entities linked through inlined relation
@@ -1248,7 +1124,7 @@
             try:
                 rschema = getrschema[rtype]
             except KeyError:
-                err(session._("can't restore relation %(rtype)s of entity %(eid)s, "
+                err(cnx._("can't restore relation %(rtype)s of entity %(eid)s, "
                           "this relation does not exist in the schema anymore.")
                     % {'rtype': rtype, 'eid': eid})
             if not rschema.final:
@@ -1261,57 +1137,53 @@
                         entity._cw.entity_from_eid(value) # check target exists
                         edited[rtype] = value
                     except UnknownEid:
-                        err(session._("can't restore entity %(eid)s of type %(eschema)s, "
+                        err(cnx._("can't restore entity %(eid)s of type %(eschema)s, "
                                   "target of %(rtype)s (eid %(value)s) does not exist any longer")
                             % locals())
                 elif eschema.destination(rtype) in ('Bytes', 'Password'):
                     changes[column] = self._binary(value)
                     edited[rtype] = Binary(value)
                 elif isinstance(value, str):
-                    edited[rtype] = unicode(value, session.encoding, 'replace')
+                    edited[rtype] = unicode(value, cnx.encoding, 'replace')
                 else:
                     edited[rtype] = value
         # This must only be done after init_entity_caches: deferred in calling functions
        # edited.check()
 
-    def _undo_d(self, session, action):
+    def _undo_d(self, cnx, action):
        """undo an entity deletion"""
        errors = []
        err = errors.append
        eid = action.eid
        etype = action.etype
-        _ = session._
+        _ = cnx._
        # get an entity instance
        try:
-            entity = self.repo.vreg['etypes'].etype_class(etype)(session)
+            entity = self.repo.vreg['etypes'].etype_class(etype)(cnx)
        except Exception:
            err("can't restore entity %s of type %s, type no longer supported"
                % (eid, etype))
            return errors
        self._reedit_entity(entity, action.changes, err)
        entity.eid = eid
-
session.repo.init_entity_caches(session, entity, self) + cnx.repo.init_entity_caches(cnx, entity, self) entity.cw_edited.check() - self.repo.hm.call_hooks('before_add_entity', session, entity=entity) + self.repo.hm.call_hooks('before_add_entity', cnx, entity=entity) # restore the entity action.changes['cw_eid'] = eid sql = self.sqlgen.insert(SQL_PREFIX + etype, action.changes) - self.doexec(session, sql, action.changes) + self.doexec(cnx, sql, action.changes) # restore record in entities (will update fti if needed) - self.add_info(session, entity, self, None, True) - # remove record from deleted_entities if entity's type is multi-sources - if entity.cw_etype in self.multisources_etypes: - self.doexec(session, - 'DELETE FROM deleted_entities WHERE eid=%s' % eid) - self.repo.hm.call_hooks('after_add_entity', session, entity=entity) + self.add_info(cnx, entity, self, None) + self.repo.hm.call_hooks('after_add_entity', cnx, entity=entity) return errors - def _undo_r(self, session, action): + def _undo_r(self, cnx, action): """undo a relation removal""" errors = [] subj, rtype, obj = action.eid_from, action.rtype, action.eid_to try: - sentity, oentity, rdef = _undo_rel_info(session, subj, rtype, obj) + sentity, oentity, rdef = _undo_rel_info(cnx, subj, rtype, obj) except _UndoException as ex: errors.append(unicode(ex)) else: @@ -1323,78 +1195,78 @@ errors.append(unicode(ex)) continue if not errors: - self.repo.hm.call_hooks('before_add_relation', session, + self.repo.hm.call_hooks('before_add_relation', cnx, eidfrom=subj, rtype=rtype, eidto=obj) # add relation in the database - self._add_relations(session, rtype, [(subj, obj)], rdef.rtype.inlined) + self._add_relations(cnx, rtype, [(subj, obj)], rdef.rtype.inlined) # set related cache - session.update_rel_cache_add(subj, rtype, obj, rdef.rtype.symmetric) - self.repo.hm.call_hooks('after_add_relation', session, + cnx.update_rel_cache_add(subj, rtype, obj, rdef.rtype.symmetric) + self.repo.hm.call_hooks('after_add_relation', cnx, eidfrom=subj, rtype=rtype, eidto=obj) return errors - def _undo_c(self, session, action): + def _undo_c(self, cnx, action): """undo an entity creation""" eid = action.eid # XXX done to avoid fetching all remaining relation for the entity # we should find an efficient way to do this (keeping current veolidf # massive deletion performance) - if _undo_has_later_transaction(session, eid): - msg = session._('some later transaction(s) touch entity, undo them ' + if _undo_has_later_transaction(cnx, eid): + msg = cnx._('some later transaction(s) touch entity, undo them ' 'first') raise ValidationError(eid, {None: msg}) etype = action.etype # get an entity instance try: - entity = self.repo.vreg['etypes'].etype_class(etype)(session) + entity = self.repo.vreg['etypes'].etype_class(etype)(cnx) except Exception: - return [session._( + return [cnx._( "Can't undo creation of entity %(eid)s of type %(etype)s, type " "no more supported" % {'eid': eid, 'etype': etype})] entity.eid = eid # for proper eid/type cache update - CleanupDeletedEidsCacheOp.get_instance(session).add_data(eid) - self.repo.hm.call_hooks('before_delete_entity', session, entity=entity) + CleanupDeletedEidsCacheOp.get_instance(cnx).add_data(eid) + self.repo.hm.call_hooks('before_delete_entity', cnx, entity=entity) # remove is / is_instance_of which are added using sql by hooks, hence # unvisible as transaction action - self.doexec(session, 'DELETE FROM is_relation WHERE eid_from=%s' % eid) - self.doexec(session, 'DELETE FROM is_instance_of_relation WHERE eid_from=%s' 
% eid) - self.doexec(session, 'DELETE FROM cw_source_relation WHERE eid_from=%s' % self.eid) + self.doexec(cnx, 'DELETE FROM is_relation WHERE eid_from=%s' % eid) + self.doexec(cnx, 'DELETE FROM is_instance_of_relation WHERE eid_from=%s' % eid) + self.doexec(cnx, 'DELETE FROM cw_source_relation WHERE eid_from=%s' % self.eid) # XXX check removal of inlined relation? # delete the entity attrs = {'cw_eid': eid} sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs) - self.doexec(session, sql, attrs) + self.doexec(cnx, sql, attrs) # remove record from entities (will update fti if needed) - self.delete_info_multi(session, [entity], self.uri) - self.repo.hm.call_hooks('after_delete_entity', session, entity=entity) + self.delete_info_multi(cnx, [entity]) + self.repo.hm.call_hooks('after_delete_entity', cnx, entity=entity) return () - def _undo_u(self, session, action): + def _undo_u(self, cnx, action): """undo an entity update""" errors = [] err = errors.append try: - entity = session.entity_from_eid(action.eid) + entity = cnx.entity_from_eid(action.eid) except UnknownEid: - err(session._("can't restore state of entity %s, it has been " + err(cnx._("can't restore state of entity %s, it has been " "deleted inbetween") % action.eid) return errors self._reedit_entity(entity, action.changes, err) entity.cw_edited.check() - self.repo.hm.call_hooks('before_update_entity', session, entity=entity) + self.repo.hm.call_hooks('before_update_entity', cnx, entity=entity) sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, action.changes, ['cw_eid']) - self.doexec(session, sql, action.changes) - self.repo.hm.call_hooks('after_update_entity', session, entity=entity) + self.doexec(cnx, sql, action.changes) + self.repo.hm.call_hooks('after_update_entity', cnx, entity=entity) return errors - def _undo_a(self, session, action): + def _undo_a(self, cnx, action): """undo a relation addition""" errors = [] subj, rtype, obj = action.eid_from, action.rtype, action.eid_to try: - sentity, oentity, rdef = _undo_rel_info(session, subj, rtype, obj) + sentity, oentity, rdef = _undo_rel_info(cnx, subj, rtype, obj) except _UndoException as ex: errors.append(unicode(ex)) else: @@ -1405,19 +1277,19 @@ else: sql = 'SELECT 1 FROM %s_relation WHERE eid_from=%s and eid_to=%s'\ % (rtype, subj, obj) - cu = self.doexec(session, sql) + cu = self.doexec(cnx, sql) if cu.fetchone() is None: - errors.append(session._( + errors.append(cnx._( "Can't undo addition of relation %(rtype)s from %(subj)s to" " %(obj)s, doesn't exist anymore" % locals())) if not errors: - self.repo.hm.call_hooks('before_delete_relation', session, + self.repo.hm.call_hooks('before_delete_relation', cnx, eidfrom=subj, rtype=rtype, eidto=obj) # delete relation from the database - self._delete_relation(session, subj, rtype, obj, rschema.inlined) + self._delete_relation(cnx, subj, rtype, obj, rschema.inlined) # set related cache - session.update_rel_cache_del(subj, rtype, obj, rschema.symmetric) - self.repo.hm.call_hooks('after_delete_relation', session, + cnx.update_rel_cache_del(subj, rtype, obj, rschema.symmetric) + self.repo.hm.call_hooks('after_delete_relation', cnx, eidfrom=subj, rtype=rtype, eidto=obj) return errors @@ -1432,16 +1304,16 @@ return True return False - def index_entity(self, session, entity): + def index_entity(self, cnx, entity): """create an operation to [re]index textual content of the given entity on commit """ - FTIndexEntityOp.get_instance(session).add_data(entity.eid) + FTIndexEntityOp.get_instance(cnx).add_data(entity.eid) - def 
fti_unindex_entities(self, session, entities): + def fti_unindex_entities(self, cnx, entities): """remove text content for entities from the full text index """ - cursor = session.cnxset['system'] + cursor = cnx.cnxset.cu cursor_unindex_object = self.dbhelper.cursor_unindex_object try: for entity in entities: @@ -1450,11 +1322,11 @@ self.exception('error while unindexing %s', entity) - def fti_index_entities(self, session, entities): + def fti_index_entities(self, cnx, entities): """add text content of created/modified entities to the full text index """ cursor_index_object = self.dbhelper.cursor_index_object - cursor = session.cnxset['system'] + cursor = cnx.cnxset.cu try: # use cursor_index_object, not cursor_reindex_object since # unindexing done in the FTIndexEntityOp @@ -1475,10 +1347,10 @@ """ def precommit_event(self): - session = self.session - source = session.repo.system_source - pendingeids = session.transaction_data.get('pendingeids', ()) - done = session.transaction_data.setdefault('indexedeids', set()) + cnx = self.cnx + source = cnx.repo.system_source + pendingeids = cnx.transaction_data.get('pendingeids', ()) + done = cnx.transaction_data.setdefault('indexedeids', set()) to_reindex = set() for eid in self.get_data(): if eid in pendingeids or eid in done: @@ -1486,10 +1358,10 @@ # processed continue done.add(eid) - iftindexable = session.entity_from_eid(eid).cw_adapt_to('IFTIndexable') + iftindexable = cnx.entity_from_eid(eid).cw_adapt_to('IFTIndexable') to_reindex |= set(iftindexable.fti_containers()) - source.fti_unindex_entities(session, to_reindex) - source.fti_index_entities(session, to_reindex) + source.fti_unindex_entities(cnx, to_reindex) + source.fti_index_entities(cnx, to_reindex) def sql_schema(driver): helper = get_db_helper(driver) @@ -1502,26 +1374,12 @@ CREATE TABLE entities ( eid INTEGER PRIMARY KEY NOT NULL, type VARCHAR(64) NOT NULL, - source VARCHAR(128) NOT NULL, asource VARCHAR(128) NOT NULL, - mtime %s NOT NULL, extid VARCHAR(256) );; CREATE INDEX entities_type_idx ON entities(type);; -CREATE INDEX entities_mtime_idx ON entities(mtime);; CREATE INDEX entities_extid_idx ON entities(extid);; -CREATE TABLE deleted_entities ( - eid INTEGER PRIMARY KEY NOT NULL, - type VARCHAR(64) NOT NULL, - source VARCHAR(128) NOT NULL, - dtime %s NOT NULL, - extid VARCHAR(256) -);; -CREATE INDEX deleted_entities_type_idx ON deleted_entities(type);; -CREATE INDEX deleted_entities_dtime_idx ON deleted_entities(dtime);; -CREATE INDEX deleted_entities_extid_idx ON deleted_entities(extid);; - CREATE TABLE transactions ( tx_uuid CHAR(32) PRIMARY KEY NOT NULL, tx_user INTEGER NOT NULL, @@ -1559,8 +1417,8 @@ CREATE INDEX tx_relation_actions_eid_from_idx ON tx_relation_actions(eid_from);; CREATE INDEX tx_relation_actions_eid_to_idx ON tx_relation_actions(eid_to);; CREATE INDEX tx_relation_actions_tx_uuid_idx ON tx_relation_actions(tx_uuid);; -""" % (helper.sql_create_sequence('entities_id_seq').replace(';', ';;'), - typemap['Datetime'], typemap['Datetime'], typemap['Datetime'], +""" % (helper.sql_create_numrange('entities_id_seq').replace(';', ';;'), + typemap['Datetime'], typemap['Boolean'], typemap['Bytes'], typemap['Boolean']) if helper.backend_name == 'sqlite': # sqlite support the ON DELETE CASCADE syntax but do nothing @@ -1580,16 +1438,15 @@ return """ %s DROP TABLE entities; -DROP TABLE deleted_entities; DROP TABLE tx_entity_actions; DROP TABLE tx_relation_actions; DROP TABLE transactions; -""" % helper.sql_drop_sequence('entities_id_seq') +""" % 
helper.sql_drop_numrange('entities_id_seq') def grant_schema(user, set_owner=True): result = '' - for table in ('entities', 'deleted_entities', 'entities_id_seq', + for table in ('entities', 'entities_id_seq', 'transactions', 'tx_entity_actions', 'tx_relation_actions'): if set_owner: result = 'ALTER TABLE %s OWNER TO %s;\n' % (table, user) @@ -1619,7 +1476,7 @@ self._passwd_rqlst = self.source.compile_rql(self.passwd_rql, self._sols) self._auth_rqlst = self.source.compile_rql(self.auth_rql, self._sols) - def authenticate(self, session, login, password=None, **kwargs): + def authenticate(self, cnx, login, password=None, **kwargs): """return CWUser eid for the given login/password if this account is defined in this source, else raise `AuthenticationError` @@ -1628,7 +1485,7 @@ """ args = {'login': login, 'pwd' : None} if password is not None: - rset = self.source.syntax_tree_search(session, self._passwd_rqlst, args) + rset = self.source.syntax_tree_search(cnx, self._passwd_rqlst, args) try: pwd = rset[0][0] except IndexError: @@ -1639,7 +1496,7 @@ # passwords are stored using the Bytes type, so we get a StringIO args['pwd'] = Binary(crypt_password(password, pwd.getvalue())) # get eid from login and (crypted) password - rset = self.source.syntax_tree_search(session, self._auth_rqlst, args) + rset = self.source.syntax_tree_search(cnx, self._auth_rqlst, args) try: user = rset[0][0] # If the stored hash uses a deprecated scheme (e.g. DES or MD5 used @@ -1649,32 +1506,33 @@ if not verify: # should not happen, but... raise AuthenticationError('bad password') if newhash: - session.system_sql("UPDATE %s SET %s=%%(newhash)s WHERE %s=%%(login)s" % ( + cnx.system_sql("UPDATE %s SET %s=%%(newhash)s WHERE %s=%%(login)s" % ( SQL_PREFIX + 'CWUser', SQL_PREFIX + 'upassword', SQL_PREFIX + 'login'), {'newhash': self.source._binary(newhash), 'login': login}) - session.commit(free_cnxset=False) + cnx.commit(free_cnxset=False) return user except IndexError: raise AuthenticationError('bad password') class EmailPasswordAuthentifier(BaseAuthentifier): - def authenticate(self, session, login, **authinfo): + def authenticate(self, cnx, login, **authinfo): # email_auth flag prevent from infinite recursion (call to # repo.check_auth_info at the end of this method may lead us here again) if not '@' in login or authinfo.pop('email_auth', None): raise AuthenticationError('not an email') - rset = session.execute('Any L WHERE U login L, U primary_email M, ' + rset = cnx.execute('Any L WHERE U login L, U primary_email M, ' 'M address %(login)s', {'login': login}, build_descr=False) if rset.rowcount != 1: raise AuthenticationError('unexisting email') login = rset.rows[0][0] authinfo['email_auth'] = True - return self.source.repo.check_auth_info(session, login, authinfo) + return self.source.repo.check_auth_info(cnx, login, authinfo) + class DatabaseIndependentBackupRestore(object): """Helper class to perform db backend agnostic backup and restore @@ -1720,7 +1578,7 @@ self.cnx = self.get_connection() try: self.cursor = self.cnx.cursor() - self.cursor.arraysize=100 + self.cursor.arraysize = 100 self.logger.info('writing metadata') self.write_metadata(archive) for seq in self.get_sequences(): @@ -1736,7 +1594,6 @@ def get_tables(self): non_entity_tables = ['entities', - 'deleted_entities', 'transactions', 'tx_entity_actions', 'tx_relation_actions', @@ -1764,8 +1621,8 @@ archive.writestr('tables.txt', '\n'.join(self.get_tables())) archive.writestr('sequences.txt', '\n'.join(self.get_sequences())) versions = self._get_versions() 
- versions_str = '\n'.join('%s %s' % (k,v) - for k,v in versions) + versions_str = '\n'.join('%s %s' % (k, v) + for k, v in versions) archive.writestr('versions.txt', versions_str) def write_sequence(self, archive, seq): diff -r 84738d495ffd -r 793377697c81 server/sources/pyrorql.py --- a/server/sources/pyrorql.py Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,63 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Source to query another RQL repository using pyro""" - -__docformat__ = "restructuredtext en" -_ = unicode - -# module is lazily imported -import warnings -warnings.warn('Imminent drop of pyrorql source. Switch to datafeed now!', - DeprecationWarning) - -import threading -from Pyro.errors import PyroError, ConnectionClosedError - -from cubicweb import ConnectionError -from cubicweb.server.sources import ConnectionWrapper - -from cubicweb.server.sources.remoterql import RemoteSource - -class PyroRQLSource(RemoteSource): - """External repository source, using Pyro connection""" - - def get_connection(self): - try: - return self._get_connection() - except (ConnectionError, PyroError) as ex: - self.critical("can't get connection to source %s: %s", self.uri, ex) - return ConnectionWrapper() - - def check_connection(self, cnx): - """check connection validity, return None if the connection is still valid - else a new connection - """ - # we have to transfer manually thread ownership. This can be done safely - # since the connections set holding the connection is affected to one - # session/thread and can't be called simultaneously - try: - cnx._repo._transferThread(threading.currentThread()) - except AttributeError: - # inmemory connection - pass - try: - return super(PyroRQLSource, self).check_connection(cnx) - except ConnectionClosedError: - # try to reconnect - return self.get_connection() - diff -r 84738d495ffd -r 793377697c81 server/sources/remoterql.py --- a/server/sources/remoterql.py Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,670 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Source to query another RQL remote repository""" - -__docformat__ = "restructuredtext en" -_ = unicode - -from os.path import join -from base64 import b64decode - -from logilab.common.configuration import REQUIRED - -from yams.schema import role_name - -from rql.nodes import Constant -from rql.utils import rqlvar_maker - -from cubicweb import dbapi, server -from cubicweb import ValidationError, BadConnectionId, UnknownEid -from cubicweb.schema import VIRTUAL_RTYPES -from cubicweb.server.sources import (AbstractSource, ConnectionWrapper, - TimedCache, dbg_st_search, dbg_results) -from cubicweb.server.msplanner import neged_relation - -def uidtype(union, col, etype, args): - select, col = union.locate_subquery(col, etype, args) - return getattr(select.selection[col], 'uidtype', None) - - -class ReplaceByInOperator(Exception): - def __init__(self, eids): - self.eids = eids - -class RemoteSource(AbstractSource): - """Generic external repository source""" - - # boolean telling if modification hooks should be called when something is - # modified in this source - should_call_hooks = False - # boolean telling if the repository should connect to this source during - # migration - connect_for_migration = False - - options = ( - - ('cubicweb-user', - {'type' : 'string', - 'default': REQUIRED, - 'help': 'user to use for connection on the distant repository', - 'group': 'remote-source', 'level': 0, - }), - ('cubicweb-password', - {'type' : 'password', - 'default': '', - 'help': 'user to use for connection on the distant repository', - 'group': 'remote-source', 'level': 0, - }), - ('base-url', - {'type' : 'string', - 'default': '', - 'help': 'url of the web site for the distant repository, if you want ' - 'to generate external link to entities from this repository', - 'group': 'remote-source', 'level': 1, - }), - ('skip-external-entities', - {'type' : 'yn', - 'default': False, - 'help': 'should entities not local to the source be considered or not', - 'group': 'remote-source', 'level': 0, - }), - ('synchronization-interval', - {'type' : 'time', - 'default': '5min', - 'help': 'interval between synchronization with the external \ -repository (default to 5 minutes).', - 'group': 'remote-source', 'level': 2, - })) - - PUBLIC_KEYS = AbstractSource.PUBLIC_KEYS + ('base-url',) - - _conn = None - - def __init__(self, repo, source_config, eid=None): - super(RemoteSource, self).__init__(repo, source_config, eid) - self._query_cache = TimedCache(1800) - - def update_config(self, source_entity, processed_config): - """update configuration from source entity""" - super(RemoteSource, self).update_config(source_entity, processed_config) - baseurl = processed_config.get('base-url') - if baseurl and not baseurl.endswith('/'): - processed_config['base-url'] += '/' - self.config = processed_config - self._skip_externals = processed_config['skip-external-entities'] - if source_entity is not None: - self.latest_retrieval = source_entity.latest_retrieval - - def _entity_update(self, source_entity): - super(RemoteSource, self)._entity_update(source_entity) - if self.urls and len(self.urls) > 1: - raise ValidationError(source_entity.eid, {'url': _('can only have one url')}) - - def get_connection(self): - try: - return self._get_connection() - except ConnectionError as ex: - self.critical("can't get connection to source %s: %s", self.uri, ex) - return ConnectionWrapper() - - def _get_connection(self): - 
"""open and return a connection to the source""" - self.info('connecting to source %s as user %s', - self.urls[0], self.config['cubicweb-user']) - # XXX check protocol according to source type (zmq / pyro) - return dbapi.connect(self.urls[0], login=self.config['cubicweb-user'], - password=self.config['cubicweb-password']) - - def reset_caches(self): - """method called during test to reset potential source caches""" - self._query_cache = TimedCache(1800) - - def init(self, activated, source_entity): - """method called by the repository once ready to handle request""" - super(RemoteSource, self).init(activated, source_entity) - self.load_mapping(source_entity._cw) - if activated: - interval = self.config['synchronization-interval'] - self.repo.looping_task(interval, self.synchronize) - self.repo.looping_task(self._query_cache.ttl.seconds/10, - self._query_cache.clear_expired) - self.latest_retrieval = source_entity.latest_retrieval - - def load_mapping(self, session=None): - self.support_entities = {} - self.support_relations = {} - self.dont_cross_relations = set(('owned_by', 'created_by')) - self.cross_relations = set() - assert self.eid is not None - self._schemacfg_idx = {} - self._load_mapping(session) - - etype_options = set(('write',)) - rtype_options = set(('maycross', 'dontcross', 'write',)) - - def _check_options(self, schemacfg, allowedoptions): - if schemacfg.options: - options = set(w.strip() for w in schemacfg.options.split(':')) - else: - options = set() - if options - allowedoptions: - options = ', '.join(sorted(options - allowedoptions)) - msg = _('unknown option(s): %s' % options) - raise ValidationError(schemacfg.eid, {role_name('options', 'subject'): msg}) - return options - - def add_schema_config(self, schemacfg, checkonly=False): - """added CWSourceSchemaConfig, modify mapping accordingly""" - try: - ertype = schemacfg.schema.name - except AttributeError: - msg = schemacfg._cw._("attribute/relation can't be mapped, only " - "entity and relation types") - raise ValidationError(schemacfg.eid, {role_name('cw_for_schema', 'subject'): msg}) - if schemacfg.schema.__regid__ == 'CWEType': - options = self._check_options(schemacfg, self.etype_options) - if not checkonly: - self.support_entities[ertype] = 'write' in options - else: # CWRType - if ertype in ('is', 'is_instance_of', 'cw_source') or ertype in VIRTUAL_RTYPES: - msg = schemacfg._cw._('%s relation should not be in mapped') % ertype - raise ValidationError(schemacfg.eid, {role_name('cw_for_schema', 'subject'): msg}) - options = self._check_options(schemacfg, self.rtype_options) - if 'dontcross' in options: - if 'maycross' in options: - msg = schemacfg._("can't mix dontcross and maycross options") - raise ValidationError(schemacfg.eid, {role_name('options', 'subject'): msg}) - if 'write' in options: - msg = schemacfg._("can't mix dontcross and write options") - raise ValidationError(schemacfg.eid, {role_name('options', 'subject'): msg}) - if not checkonly: - self.dont_cross_relations.add(ertype) - elif not checkonly: - self.support_relations[ertype] = 'write' in options - if 'maycross' in options: - self.cross_relations.add(ertype) - if not checkonly: - # add to an index to ease deletion handling - self._schemacfg_idx[schemacfg.eid] = ertype - - def del_schema_config(self, schemacfg, checkonly=False): - """deleted CWSourceSchemaConfig, modify mapping accordingly""" - if checkonly: - return - try: - ertype = self._schemacfg_idx[schemacfg.eid] - if ertype[0].isupper(): - del self.support_entities[ertype] - else: - if 
ertype in self.support_relations: - del self.support_relations[ertype] - if ertype in self.cross_relations: - self.cross_relations.remove(ertype) - else: - self.dont_cross_relations.remove(ertype) - except Exception: - self.error('while updating mapping consequently to removal of %s', - schemacfg) - - def local_eid(self, cnx, extid, session): - etype, dexturi, dextid = cnx.describe(extid) - if dexturi == 'system' or not ( - dexturi in self.repo.sources_by_uri or self._skip_externals): - assert etype in self.support_entities, etype - eid = self.repo.extid2eid(self, str(extid), etype, session) - if eid > 0: - return eid, True - elif dexturi in self.repo.sources_by_uri: - source = self.repo.sources_by_uri[dexturi] - cnx = session.cnxset.connection(source.uri) - eid = source.local_eid(cnx, dextid, session)[0] - return eid, False - return None, None - - def synchronize(self, mtime=None): - """synchronize content known by this repository with content in the - external repository - """ - self.info('synchronizing remote source %s', self.uri) - cnx = self.get_connection() - try: - extrepo = cnx._repo - except AttributeError: - # fake connection wrapper returned when we can't connect to the - # external source (hence we've no chance to synchronize...) - return - etypes = list(self.support_entities) - if mtime is None: - mtime = self.latest_retrieval - updatetime, modified, deleted = extrepo.entities_modified_since(etypes, mtime) - self._query_cache.clear() - repo = self.repo - session = repo.internal_session() - source = repo.system_source - try: - for etype, extid in modified: - try: - eid = self.local_eid(cnx, extid, session)[0] - if eid is not None: - rset = session.eid_rset(eid, etype) - entity = rset.get_entity(0, 0) - entity.complete(entity.e_schema.indexable_attributes()) - source.index_entity(session, entity) - except Exception: - self.exception('while updating %s with external id %s of source %s', - etype, extid, self.uri) - continue - for etype, extid in deleted: - try: - eid = self.repo.extid2eid(self, str(extid), etype, session, - insert=False) - # entity has been deleted from external repository but is not known here - if eid is not None: - entity = session.entity_from_eid(eid, etype) - repo.delete_info(session, entity, self.uri, - scleanup=self.eid) - except Exception: - if self.repo.config.mode == 'test': - raise - self.exception('while updating %s with external id %s of source %s', - etype, extid, self.uri) - continue - self.latest_retrieval = updatetime - session.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s', - {'x': self.eid, 'date': self.latest_retrieval}) - session.commit() - finally: - session.close() - - def get_connection(self): - raise NotImplementedError() - - def check_connection(self, cnx): - """check connection validity, return None if the connection is still valid - else a new connection - """ - if not isinstance(cnx, ConnectionWrapper): - try: - cnx.check() - return # ok - except BadConnectionId: - pass - # try to reconnect - return self.get_connection() - - def syntax_tree_search(self, session, union, args=None, cachekey=None, - varmap=None): - assert dbg_st_search(self.uri, union, varmap, args, cachekey) - rqlkey = union.as_string(kwargs=args) - try: - results = self._query_cache[rqlkey] - except KeyError: - results = self._syntax_tree_search(session, union, args) - self._query_cache[rqlkey] = results - assert dbg_results(results) - return results - - def _syntax_tree_search(self, session, union, args): - """return result from this source for a rql query 
(actually from a rql - syntax tree and a solution dictionary mapping each used variable to a - possible type). If cachekey is given, the query necessary to fetch the - results (but not the results themselves) may be cached using this key. - """ - if not args is None: - args = args.copy() - # get cached cursor anyway - cu = session.cnxset[self.uri] - if cu is None: - # this is a ConnectionWrapper instance - msg = session._("can't connect to source %s, some data may be missing") - session.set_shared_data('sources_error', msg % self.uri, txdata=True) - return [] - translator = RQL2RQL(self) - try: - rql = translator.generate(session, union, args) - except UnknownEid as ex: - if server.DEBUG: - print ' unknown eid', ex, 'no results' - return [] - if server.DEBUG & server.DBG_RQL: - print ' translated rql', rql - try: - rset = cu.execute(rql, args) - except Exception as ex: - self.exception(str(ex)) - msg = session._("error while querying source %s, some data may be missing") - session.set_shared_data('sources_error', msg % self.uri, txdata=True) - return [] - descr = rset.description - if rset: - needtranslation = [] - rows = rset.rows - for i, etype in enumerate(descr[0]): - if (etype is None or not self.schema.eschema(etype).final - or uidtype(union, i, etype, args)): - needtranslation.append(i) - if needtranslation: - cnx = session.cnxset.connection(self.uri) - for rowindex in xrange(rset.rowcount - 1, -1, -1): - row = rows[rowindex] - localrow = False - for colindex in needtranslation: - if row[colindex] is not None: # optional variable - eid, local = self.local_eid(cnx, row[colindex], session) - if local: - localrow = True - if eid is not None: - row[colindex] = eid - else: - # skip this row - del rows[rowindex] - del descr[rowindex] - break - else: - # skip row if it only contains eids of entities which - # are actually from a source we also know locally, - # except if some args specified (XXX should actually - # check if there are some args local to the source) - if not (translator.has_local_eid or localrow): - del rows[rowindex] - del descr[rowindex] - results = rows - else: - results = [] - return results - - def _entity_relations_and_kwargs(self, session, entity): - relations = [] - kwargs = {'x': self.repo.eid2extid(self, entity.eid, session)} - for key, val in entity.cw_attr_cache.iteritems(): - relations.append('X %s %%(%s)s' % (key, key)) - kwargs[key] = val - return relations, kwargs - - def add_entity(self, session, entity): - """add a new entity to the source""" - raise NotImplementedError() - - def update_entity(self, session, entity): - """update an entity in the source""" - relations, kwargs = self._entity_relations_and_kwargs(session, entity) - cu = session.cnxset[self.uri] - cu.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations), kwargs) - self._query_cache.clear() - entity.cw_clear_all_caches() - - def delete_entity(self, session, entity): - """delete an entity from the source""" - if session.deleted_in_transaction(self.eid): - # source is being deleted, don't propagate - self._query_cache.clear() - return - cu = session.cnxset[self.uri] - cu.execute('DELETE %s X WHERE X eid %%(x)s' % entity.cw_etype, - {'x': self.repo.eid2extid(self, entity.eid, session)}) - self._query_cache.clear() - - def add_relation(self, session, subject, rtype, object): - """add a relation to the source""" - cu = session.cnxset[self.uri] - cu.execute('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype, - {'x': self.repo.eid2extid(self, subject, session), - 'y': self.repo.eid2extid(self, 
object, session)}) - self._query_cache.clear() - session.entity_from_eid(subject).cw_clear_all_caches() - session.entity_from_eid(object).cw_clear_all_caches() - - def delete_relation(self, session, subject, rtype, object): - """delete a relation from the source""" - if session.deleted_in_transaction(self.eid): - # source is being deleted, don't propagate - self._query_cache.clear() - return - cu = session.cnxset[self.uri] - cu.execute('DELETE X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype, - {'x': self.repo.eid2extid(self, subject, session), - 'y': self.repo.eid2extid(self, object, session)}) - self._query_cache.clear() - session.entity_from_eid(subject).cw_clear_all_caches() - session.entity_from_eid(object).cw_clear_all_caches() - - -class RQL2RQL(object): - """translate a local rql query to be executed on a distant repository""" - def __init__(self, source): - self.source = source - self.repo = source.repo - self.current_operator = None - - def _accept_children(self, node): - res = [] - for child in node.children: - rql = child.accept(self) - if rql is not None: - res.append(rql) - return res - - def generate(self, session, rqlst, args): - self._session = session - self.kwargs = args - self.need_translation = False - self.has_local_eid = False - return self.visit_union(rqlst) - - def visit_union(self, node): - s = self._accept_children(node) - if len(s) > 1: - return ' UNION '.join('(%s)' % q for q in s) - return s[0] - - def visit_select(self, node): - """return the tree as an encoded rql string""" - self._varmaker = rqlvar_maker(defined=node.defined_vars.copy()) - self._const_var = {} - if node.distinct: - base = 'DISTINCT Any' - else: - base = 'Any' - s = ['%s %s' % (base, ','.join(v.accept(self) for v in node.selection))] - if node.groupby: - s.append('GROUPBY %s' % ', '.join(group.accept(self) - for group in node.groupby)) - if node.orderby: - s.append('ORDERBY %s' % ', '.join(self.visit_sortterm(term) - for term in node.orderby)) - if node.limit is not None: - s.append('LIMIT %s' % node.limit) - if node.offset: - s.append('OFFSET %s' % node.offset) - restrictions = [] - if node.where is not None: - nr = node.where.accept(self) - if nr is not None: - restrictions.append(nr) - if restrictions: - s.append('WHERE %s' % ','.join(restrictions)) - - if node.having: - s.append('HAVING %s' % ', '.join(term.accept(self) - for term in node.having)) - subqueries = [] - for subquery in node.with_: - subqueries.append('%s BEING (%s)' % (','.join(ca.name for ca in subquery.aliases), - self.visit_union(subquery.query))) - if subqueries: - s.append('WITH %s' % (','.join(subqueries))) - return ' '.join(s) - - def visit_and(self, node): - res = self._accept_children(node) - if res: - return ', '.join(res) - return - - def visit_or(self, node): - res = self._accept_children(node) - if len(res) > 1: - return ' OR '.join('(%s)' % rql for rql in res) - elif res: - return res[0] - return - - def visit_not(self, node): - rql = node.children[0].accept(self) - if rql: - return 'NOT (%s)' % rql - return - - def visit_exists(self, node): - rql = node.children[0].accept(self) - if rql: - return 'EXISTS(%s)' % rql - return - - def visit_relation(self, node): - try: - if isinstance(node.children[0], Constant): - # simplified rqlst, reintroduce eid relation - try: - restr, lhs = self.process_eid_const(node.children[0]) - except UnknownEid: - # can safely skip not relation with an unsupported eid - if neged_relation(node): - return - raise - else: - lhs = node.children[0].accept(self) - restr = None - except 
UnknownEid: - # can safely skip not relation with an unsupported eid - if neged_relation(node): - return - # XXX what about optional relation or outer NOT EXISTS() - raise - if node.optional in ('left', 'both'): - lhs += '?' - if node.r_type == 'eid' or not self.source.schema.rschema(node.r_type).final: - self.need_translation = True - self.current_operator = node.operator() - if isinstance(node.children[0], Constant): - self.current_etypes = (node.children[0].uidtype,) - else: - self.current_etypes = node.children[0].variable.stinfo['possibletypes'] - try: - rhs = node.children[1].accept(self) - except UnknownEid: - # can safely skip not relation with an unsupported eid - if neged_relation(node): - return - # XXX what about optional relation or outer NOT EXISTS() - raise - except ReplaceByInOperator as ex: - rhs = 'IN (%s)' % ','.join(eid for eid in ex.eids) - self.need_translation = False - self.current_operator = None - if node.optional in ('right', 'both'): - rhs += '?' - if restr is not None: - return '%s %s %s, %s' % (lhs, node.r_type, rhs, restr) - return '%s %s %s' % (lhs, node.r_type, rhs) - - def visit_comparison(self, node): - if node.operator in ('=', 'IS'): - return node.children[0].accept(self) - return '%s %s' % (node.operator.encode(), - node.children[0].accept(self)) - - def visit_mathexpression(self, node): - return '(%s %s %s)' % (node.children[0].accept(self), - node.operator.encode(), - node.children[1].accept(self)) - - def visit_function(self, node): - #if node.name == 'IN': - res = [] - for child in node.children: - try: - rql = child.accept(self) - except UnknownEid as ex: - continue - res.append(rql) - if not res: - raise ex - return '%s(%s)' % (node.name, ', '.join(res)) - - def visit_constant(self, node): - if self.need_translation or node.uidtype: - if node.type == 'Int': - self.has_local_eid = True - return str(self.eid2extid(node.value)) - if node.type == 'Substitute': - key = node.value - # ensure we have not yet translated the value... - if not key in self._const_var: - self.kwargs[key] = self.eid2extid(self.kwargs[key]) - self._const_var[key] = None - self.has_local_eid = True - return node.as_string() - - def visit_variableref(self, node): - """get the sql name for a variable reference""" - return node.name - - def visit_sortterm(self, node): - if node.asc: - return node.term.accept(self) - return '%s DESC' % node.term.accept(self) - - def process_eid_const(self, const): - value = const.eval(self.kwargs) - try: - return None, self._const_var[value] - except Exception: - var = self._varmaker.next() - self.need_translation = True - restr = '%s eid %s' % (var, self.visit_constant(const)) - self.need_translation = False - self._const_var[value] = var - return restr, var - - def eid2extid(self, eid): - try: - return self.repo.eid2extid(self.source, eid, self._session) - except UnknownEid: - operator = self.current_operator - if operator is not None and operator != '=': - # deal with query like "X eid > 12" - # - # The problem is that eid order in the external source may - # differ from the local source - # - # So search for all eids from this source matching the condition - # locally and then to replace the "> 12" branch by "IN (eids)" - # - # XXX we may have to insert a huge number of eids...) 
- sql = "SELECT extid FROM entities WHERE source='%s' AND type IN (%s) AND eid%s%s" - etypes = ','.join("'%s'" % etype for etype in self.current_etypes) - cu = self._session.system_sql(sql % (self.source.uri, etypes, - operator, eid)) - # XXX buggy cu.rowcount which may be zero while there are some - # results - rows = cu.fetchall() - if rows: - raise ReplaceByInOperator((b64decode(r[0]) for r in rows)) - raise - diff -r 84738d495ffd -r 793377697c81 server/sources/rql2sql.py --- a/server/sources/rql2sql.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/sources/rql2sql.py Wed Sep 24 18:04:30 2014 +0200 @@ -58,8 +58,8 @@ from rql import BadRQLQuery, CoercionError from rql.utils import common_parent from rql.stmts import Union, Select -from rql.nodes import (SortTerm, VariableRef, Constant, Function, Variable, Or, - Not, Comparison, ColumnAlias, Relation, SubQuery, Exists) +from rql.nodes import (VariableRef, Constant, Function, Variable, Or, + Not, Comparison, ColumnAlias, Relation, SubQuery) from cubicweb import QueryError from cubicweb.rqlrewrite import cleanup_solutions diff -r 84738d495ffd -r 793377697c81 server/sources/storages.py --- a/server/sources/storages.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/sources/storages.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -236,7 +236,7 @@ """return the current fs_path of the attribute, or None is the attr is not stored yet. """ - sysource = entity._cw.cnxset.source('system') + sysource = entity._cw.repo.system_source cu = sysource.doexec(entity._cw, 'SELECT cw_%s FROM cw_%s WHERE cw_eid=%s' % ( attr, entity.cw_etype, entity.eid)) diff -r 84738d495ffd -r 793377697c81 server/sources/zmqrql.py --- a/server/sources/zmqrql.py Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,26 +0,0 @@ -# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Source to query another RQL repository using pyro""" - -__docformat__ = "restructuredtext en" -_ = unicode - -from cubicweb.server.sources.remoterql import RemoteSource - -class ZMQRQLSource(RemoteSource): - """External repository source, using ZMQ sockets""" diff -r 84738d495ffd -r 793377697c81 server/sqlutils.py --- a/server/sqlutils.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/sqlutils.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
@@ -19,15 +19,18 @@

 __docformat__ = "restructuredtext en"

+import sys
 import os
 import re
 import subprocess
-from datetime import datetime, date
+from os.path import abspath
 from itertools import ifilter
+from logging import getLogger

 from logilab import database as db, common as lgc
 from logilab.common.shellutils import ProgressBar
-from logilab.common.date import todate, todatetime, utcdatetime, utctime
+from logilab.common.deprecation import deprecated
+from logilab.common.logging_ext import set_log_methods
 from logilab.database.sqlgen import SQLGenerator

 from cubicweb import Binary, ConfigurationError
@@ -35,7 +38,6 @@
 from cubicweb.schema import PURE_VIRTUAL_RTYPES
 from cubicweb.server import SQL_CONNECT_HOOKS
 from cubicweb.server.utils import crypt_password
-from rql.utils import RQL_FUNCTIONS_REGISTRY

 lgc.USE_MX_DATETIME = False
 SQL_PREFIX = 'cw_'
@@ -177,10 +179,125 @@
              for name in ifilter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_tables(sqlcursor))]
     return '\n'.join(cmds)

+
+class ConnectionWrapper(object):
+    """handle connection to the system source, at some point associated to a
+    :class:`Session`
+    """
+
+    # since 3.19, we only have to manage the system source connection
+    def __init__(self, system_source):
+        # keep a reference to the system source and open a connection to it
+        self._source = system_source
+        self.cnx = system_source.get_connection()
+        self.cu = self.cnx.cursor()
+
+    def commit(self):
+        """commit the current transaction for this user"""
+        # let exceptions propagate
+        self.cnx.commit()
+
+    def rollback(self):
+        """rollback the current transaction for this user"""
+        # catch exceptions: on failure the connection is replaced by a new one
+        try:
+            self.cnx.rollback()
+        except Exception:
+            self._source.critical('rollback error', exc_info=sys.exc_info())
+            # error on rollback, the connection is most probably in a really
+            # bad state. Replace it by a new one.
+            self.reconnect()

+    def close(self, i_know_what_i_do=False):
+        """close the underlying connection and cursor"""
+        if i_know_what_i_do is not True: # unexpected closing safety belt
+            raise RuntimeError('connections set shouldn\'t be closed')
+        try:
+            self.cu.close()
+            self.cu = None
+        except Exception:
+            pass
+        try:
+            self.cnx.close()
+            self.cnx = None
+        except Exception:
+            pass
+
+    # internals ###############################################################
+
+    def cnxset_freed(self):
+        """connections set is being freed from a session"""
+        pass # do nothing by default
+
+    def reconnect(self):
+        """reopen the connection to the system source"""
+        try:
+            # properly close existing connection if any
+            self.cnx.close()
+        except Exception:
+            pass
+        self._source.info('trying to reconnect')
+        self.cnx = self._source.get_connection()
+        self.cu = self.cnx.cursor()
+
+    @deprecated('[3.19] use .cu instead')
+    def __getitem__(self, uri):
+        assert uri == 'system'
+        return self.cu
+
+    @deprecated('[3.19] use repo.system_source instead')
+    def source(self, uid):
+        assert uid == 'system'
+        return self._source
+
+    @deprecated('[3.19] use .cnx instead')
+    def connection(self, uid):
+        assert uid == 'system'
+        return self.cnx
+
+
+class SqliteConnectionWrapper(ConnectionWrapper):
+    """Sqlite specific connection wrapper: close the connection each time it's
+    freed (and reopen it later when needed)
+    """
+    def __init__(self, system_source):
+        # don't call parent's __init__, we don't want to initiate the connection
+        self._source = system_source
+
+    _cnx = None
+
+    def cnxset_freed(self):
+        self.cu.close()
+        self.cnx.close()
+        self.cnx = self.cu = None
+
+    @property
+    def cnx(self):
+        if self._cnx is None:
+            self._cnx = self._source.get_connection()
+            self._cu = self._cnx.cursor()
+        return self._cnx
+    @cnx.setter
+    def cnx(self, value):
+        self._cnx = value
+
+    @property
+    def cu(self):
+        if self._cnx is None:
+            self._cnx = self._source.get_connection()
+            self._cu = self._cnx.cursor()
+        return self._cu
+    @cu.setter
+    def cu(self, value):
+        self._cu = value
+
+
 class SQLAdapterMixIn(object):
     """Mixin for SQL data sources, getting a connection from a configuration
     dictionary and handling connection locking
     """
+    cnx_wrap = ConnectionWrapper

     def __init__(self, source_config):
         try:
@@ -208,6 +325,15 @@
         self._binary = self.dbhelper.binary_value
         self._process_value = dbapi_module.process_value
         self._dbencoding = dbencoding
+        if self.dbdriver == 'sqlite':
+            self.cnx_wrap = SqliteConnectionWrapper
+            self.dbhelper.dbname = abspath(self.dbhelper.dbname)
+
+    def wrapped_connection(self):
+        """open and return a connection to the database, wrapped into a class
+        handling reconnection
+        """
+        return self.cnx_wrap(self)

     def get_connection(self):
         """open and return a connection to the database"""
@@ -319,10 +445,11 @@
 # only defining here to prevent pylint from complaining
 info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None

-from logging import getLogger
-from cubicweb import set_log_methods
 set_log_methods(SQLAdapterMixIn, getLogger('cubicweb.sqladapter'))

+
+# connection initialization functions ##########################################
+
 def init_sqlite_connexion(cnx):

     class group_concat(object):

diff -r 84738d495ffd -r 793377697c81 server/ssplanner.py
--- a/server/ssplanner.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/server/ssplanner.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -68,13 +68,13 @@ """return a dict mapping rqlst variable object to their eid if specified in the syntax tree """ - session = plan.session + cnx = plan.cnx if rqlst.where is None: return {} eidconsts = {} - neweids = session.transaction_data.get('neweids', ()) - checkread = session.read_security - eschema = session.vreg.schema.eschema + neweids = cnx.transaction_data.get('neweids', ()) + checkread = cnx.read_security + eschema = cnx.vreg.schema.eschema for rel in rqlst.where.get_nodes(Relation): # only care for 'eid' relations ... if (rel.r_type == 'eid' @@ -89,9 +89,9 @@ # the generated select substep if not emited (eg nothing # to be selected) if checkread and eid not in neweids: - with session.security_enabled(read=False): - eschema(session.describe(eid)[0]).check_perm( - session, 'read', eid=eid) + with cnx.security_enabled(read=False): + eschema(cnx.entity_metas(eid)['type']).check_perm( + cnx, 'read', eid=eid) eidconsts[lhs.variable] = eid return eidconsts @@ -145,17 +145,17 @@ the rqlst should not be tagged at this point. """ plan.preprocess(rqlst) - return (OneFetchStep(plan, rqlst, plan.session.repo.sources),) + return (OneFetchStep(plan, rqlst),) def build_insert_plan(self, plan, rqlst): """get an execution plan from an INSERT RQL query""" # each variable in main variables is a new entity to insert to_build = {} - session = plan.session - etype_class = session.vreg['etypes'].etype_class + cnx = plan.cnx + etype_class = cnx.vreg['etypes'].etype_class for etype, var in rqlst.main_variables: # need to do this since entity class is shared w. web client code ! - to_build[var.name] = EditedEntity(etype_class(etype)(session)) + to_build[var.name] = EditedEntity(etype_class(etype)(cnx)) plan.add_entity_def(to_build[var.name]) # add constant values to entity def, mark variables to be selected to_select = _extract_const_attributes(plan, rqlst, to_build) @@ -311,24 +311,6 @@ maprepr[var] = '%s.%s' % (tablesinorder[table], col) return maprepr -def offset_result(offset, result): - offset -= len(result) - if offset < 0: - result = result[offset:] - offset = None - elif offset == 0: - offset = None - result = () - return offset, result - - -class LimitOffsetMixIn(object): - limit = offset = None - def set_limit_offset(self, limit, offset): - self.limit = limit - self.offset = offset or None - - class Step(object): """base abstract class for execution step""" def __init__(self, plan): @@ -357,29 +339,21 @@ [step.test_repr() for step in self.children],) -class OneFetchStep(LimitOffsetMixIn, Step): +class OneFetchStep(Step): """step consisting in fetching data from sources and directly returning results """ - def __init__(self, plan, union, sources, inputmap=None): + def __init__(self, plan, union, inputmap=None): Step.__init__(self, plan) self.union = union - self.sources = sources self.inputmap = inputmap - self.set_limit_offset(union.children[-1].limit, union.children[-1].offset) - - def set_limit_offset(self, limit, offset): - LimitOffsetMixIn.set_limit_offset(self, limit, offset) - for select in self.union.children: - select.limit = limit - select.offset = offset def execute(self): """call .syntax_tree_search with the given syntax tree on each source for each solution """ self.execute_children() - session = self.plan.session + cnx = self.plan.cnx args = self.plan.args inputmap = self.inputmap union = self.union @@ -395,31 
+369,9 @@ cachekey = tuple(cachekey) else: cachekey = union.as_string() - result = [] - # limit / offset processing - limit = self.limit - offset = self.offset - if offset is not None: - if len(self.sources) > 1: - # we'll have to deal with limit/offset by ourself - if union.children[-1].limit: - union.children[-1].limit = limit + offset - union.children[-1].offset = None - else: - offset, limit = None, None - for source in self.sources: - if offset is None and limit is not None: - # modifying the sample rqlst is enough since sql generation - # will pick it here as well - union.children[-1].limit = limit - len(result) - result_ = source.syntax_tree_search(session, union, args, cachekey, - inputmap) - if offset is not None: - offset, result_ = offset_result(offset, result_) - result += result_ - if limit is not None: - if len(result) >= limit: - return result[:limit] + # get results for query + source = cnx.repo.system_source + result = source.syntax_tree_search(cnx, union, args, cachekey, inputmap) #print 'ONEFETCH RESULT %s' % (result) return result @@ -432,8 +384,7 @@ return (self.__class__.__name__, sorted((r.as_string(kwargs=self.plan.args), r.solutions) for r in self.union.children), - self.limit, self.offset, - sorted(self.sources), inputmap) + inputmap) # UPDATE/INSERT/DELETE steps ################################################## @@ -515,8 +466,8 @@ results = self.execute_child() if results: todelete = frozenset(int(eid) for eid, in results) - session = self.plan.session - session.repo.glob_delete_entities(session, todelete) + cnx = self.plan.cnx + cnx.repo.glob_delete_entities(cnx, todelete) return results class DeleteRelationsStep(Step): @@ -528,10 +479,10 @@ def execute(self): """execute this step""" - session = self.plan.session - delete = session.repo.glob_delete_relation + cnx = self.plan.cnx + delete = cnx.repo.glob_delete_relation for subj, obj in self.execute_child(): - delete(session, subj, self.rtype, obj) + delete(cnx, subj, self.rtype, obj) class UpdateStep(Step): @@ -545,8 +496,8 @@ def execute(self): """execute this step""" - session = self.plan.session - repo = session.repo + cnx = self.plan.cnx + repo = cnx.repo edefs = {} relations = {} # insert relations @@ -564,7 +515,7 @@ try: edited = edefs[eid] except KeyError: - edef = session.entity_from_eid(eid) + edef = cnx.entity_from_eid(eid) edefs[eid] = edited = EditedEntity(edef) edited.edited_attribute(str(rschema), rhsval) else: @@ -575,9 +526,9 @@ relations[str_rschema] = [(lhsval, rhsval)] result[i] = newrow # update entities - repo.glob_add_relations(session, relations) + repo.glob_add_relations(cnx, relations) for eid, edited in edefs.iteritems(): - repo.glob_update_entity(session, edited) + repo.glob_update_entity(cnx, edited) return result def _handle_relterm(info, row, newrow): diff -r 84738d495ffd -r 793377697c81 server/test/data-schemaserial/bootstrap_cubes --- a/server/test/data-schemaserial/bootstrap_cubes Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -card,comment,folder,tag,basket,email,file,localperms diff -r 84738d495ffd -r 793377697c81 server/test/data-schemaserial/schema.py --- a/server/test/data-schemaserial/schema.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/data-schemaserial/schema.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -16,241 +16,17 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -from yams.buildobjs import (EntityType, RelationType, RelationDefinition, - SubjectRelation, RichString, String, Int, Float, - Boolean, Datetime, TZDatetime, Bytes) -from yams.constraints import SizeConstraint -from cubicweb.schema import (WorkflowableEntityType, - RQLConstraint, RQLUniqueConstraint, - ERQLExpression, RRQLExpression) - -from yams.buildobjs import make_type +from yams.buildobjs import EntityType, SubjectRelation, String, make_type BabarTestType = make_type('BabarTestType') - -class Affaire(WorkflowableEntityType): - __permissions__ = { - 'read': ('managers', - ERQLExpression('X owned_by U'), ERQLExpression('X concerne S?, S owned_by U')), - 'add': ('managers', ERQLExpression('X concerne S, S owned_by U')), - 'update': ('managers', 'owners', ERQLExpression('X in_state S, S name in ("pitetre", "en cours")')), - 'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')), - } - - ref = String(fulltextindexed=True, indexed=True, - constraints=[SizeConstraint(16)]) - sujet = String(fulltextindexed=True, - constraints=[SizeConstraint(256)]) - descr = RichString(fulltextindexed=True, - description=_('more detailed description')) - - duration = Int() - invoiced = Float() - opt_attr = Bytes() - - depends_on = SubjectRelation('Affaire') - require_permission = SubjectRelation('CWPermission') - concerne = SubjectRelation(('Societe', 'Note')) - todo_by = SubjectRelation('Personne', cardinality='?*') - documented_by = SubjectRelation('Card') - - -class Societe(EntityType): - __unique_together__ = [('nom', 'type', 'cp')] - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'update': ('managers', 'owners', ERQLExpression('U login L, X nom L')), - 'delete': ('managers', 'owners', ERQLExpression('U login L, X nom L')), - 'add': ('managers', 'users',) - } - - nom = String(maxsize=64, fulltextindexed=True) - web = String(maxsize=128) - type = String(maxsize=128) # attribute in common with Note - tel = Int() - fax = Int() - rncs = String(maxsize=128) - ad1 = String(maxsize=128) - ad2 = String(maxsize=128) - ad3 = String(maxsize=128) - cp = String(maxsize=12) - ville= String(maxsize=32) - - -class Division(Societe): - __specializes_schema__ = True - -class SubDivision(Division): - __specializes_schema__ = True - -class travaille_subdivision(RelationDefinition): - subject = 'Personne' - object = 'SubDivision' - -from cubicweb.schemas.base import CWUser -CWUser.get_relations('login').next().fulltextindexed = True - -class Note(WorkflowableEntityType): - date = String(maxsize=10) - type = String(maxsize=6) - para = String(maxsize=512, - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'update': ('managers', ERQLExpression('X in_state S, S name "todo"')), - }) - - migrated_from = SubjectRelation('Note') - attachment = SubjectRelation('File') - inline1 = SubjectRelation('Affaire', inlined=True, cardinality='?*', - constraints=[RQLUniqueConstraint('S type T, S inline1 A1, A1 todo_by C, ' - 'Y type T, Y inline1 A2, A2 todo_by C', - 'S,Y')]) - todo_by = SubjectRelation('CWUser') +class Affaire(EntityType): + nom = String(unique=True, maxsize=64) class Personne(EntityType): __unique_together__ = [('nom', 'prenom', 'inline2')] nom = String(fulltextindexed=True, required=True, maxsize=64) prenom = String(fulltextindexed=True,
maxsize=64) - sexe = String(maxsize=1, default='M', fulltextindexed=True) - promo = String(vocabulary=('bon','pasbon')) - titre = String(fulltextindexed=True, maxsize=128) - adel = String(maxsize=128) - ass = String(maxsize=128) - web = String(maxsize=128) - tel = Int() - fax = Int() - datenaiss = Datetime() - tzdatenaiss = TZDatetime() - test = Boolean(__permissions__={ - 'read': ('managers', 'users', 'guests'), - 'update': ('managers',), - }) - description = String() - firstname = String(fulltextindexed=True, maxsize=64) - - concerne = SubjectRelation('Affaire') - connait = SubjectRelation('Personne') inline2 = SubjectRelation('Affaire', inlined=True, cardinality='?*') custom_field_of_jungle = BabarTestType(jungle_speed=42) - -class Old(EntityType): - name = String() - - -class connait(RelationType): - symmetric = True - -class concerne(RelationType): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', RRQLExpression('U has_update_permission S')), - 'delete': ('managers', RRQLExpression('O owned_by U')), - } - -class travaille(RelationDefinition): - __permissions__ = { - 'read': ('managers', 'users', 'guests'), - 'add': ('managers', RRQLExpression('U has_update_permission S')), - 'delete': ('managers', RRQLExpression('O owned_by U')), - } - subject = 'Personne' - object = 'Societe' - -class comments(RelationDefinition): - subject = 'Comment' - object = 'Personne' - -class fiche(RelationDefinition): - inlined = True - subject = 'Personne' - object = 'Card' - cardinality = '??' - -class multisource_inlined_rel(RelationDefinition): - inlined = True - cardinality = '?*' - subject = ('Card', 'Note') - object = ('Affaire', 'Note') - -class multisource_rel(RelationDefinition): - subject = ('Card', 'Note') - object = 'Note' - -class multisource_crossed_rel(RelationDefinition): - subject = ('Card', 'Note') - object = 'Note' - - -class see_also_1(RelationDefinition): - name = 'see_also' - subject = object = 'Folder' - -class see_also_2(RelationDefinition): - name = 'see_also' - subject = ('Bookmark', 'Note') - object = ('Bookmark', 'Note') - -class evaluee(RelationDefinition): - subject = ('Personne', 'CWUser', 'Societe') - object = ('Note') - -class ecrit_par(RelationType): - inlined = True - -class ecrit_par_1(RelationDefinition): - name = 'ecrit_par' - subject = 'Note' - object ='Personne' - constraints = [RQLConstraint('E concerns P, S version_of P')] - cardinality = '?*' - -class ecrit_par_2(RelationDefinition): - name = 'ecrit_par' - subject = 'Note' - object ='CWUser' - cardinality='?*' - - -class copain(RelationDefinition): - subject = object = 'CWUser' - -class tags(RelationDefinition): - subject = 'Tag' - object = ('CWUser', 'CWGroup', 'State', 'Note', 'Card', 'Affaire') - -class filed_under(RelationDefinition): - subject = ('Note', 'Affaire') - object = 'Folder' - -class require_permission(RelationDefinition): - subject = ('Card', 'Note', 'Personne') - object = 'CWPermission' - -class require_state(RelationDefinition): - subject = 'CWPermission' - object = 'State' - -class personne_composite(RelationDefinition): - subject='Personne' - object='Personne' - composite='subject' - -class personne_inlined(RelationDefinition): - subject='Personne' - object='Personne' - cardinality='?*' - inlined=True - - -class login_user(RelationDefinition): - subject = 'Personne' - object = 'CWUser' - cardinality = '??' 
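Most of what is removed here is the multi-source flavoured test schema; what the serialization tests keep is the minimal Affaire/Personne pair plus the custom attribute type. For reference, the surviving make_type pattern in a self-contained form (mirroring the remaining lines of this file):

    from yams.buildobjs import EntityType, String, make_type

    # a new final attribute type; the extra kwargs given at the use site
    # (jungle_speed below) exercise serialization of custom types
    BabarTestType = make_type('BabarTestType')

    class Personne(EntityType):
        nom = String(required=True, maxsize=64)
        custom_field_of_jungle = BabarTestType(jungle_speed=42)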
- -class ambiguous_inlined(RelationDefinition): - subject = ('Affaire', 'Note') - object = 'CWUser' - inlined = True - cardinality = '?*' diff -r 84738d495ffd -r 793377697c81 server/test/data/migratedapp/schema.py --- a/server/test/data/migratedapp/schema.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/data/migratedapp/schema.py Wed Sep 24 18:04:30 2014 +0200 @@ -143,6 +143,7 @@ class evaluee(RelationDefinition): subject = ('Personne', 'CWUser', 'Societe') object = ('Note') + constraints = [RQLVocabularyConstraint('S owned_by U')] class ecrit_par(RelationType): __permissions__ = {'read': ('managers', 'users', 'guests',), diff -r 84738d495ffd -r 793377697c81 server/test/data/schema.py --- a/server/test/data/schema.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/data/schema.py Wed Sep 24 18:04:30 2014 +0200 @@ -195,14 +195,6 @@ subject = ('Card', 'Note') object = ('Affaire', 'Note') -class multisource_rel(RelationDefinition): - subject = ('Card', 'Note') - object = 'Note' - -class multisource_crossed_rel(RelationDefinition): - subject = ('Card', 'Note') - object = 'Note' - class see_also_1(RelationDefinition): name = 'see_also' @@ -228,7 +220,6 @@ name = 'ecrit_par' subject = 'Note' object ='Personne' - constraints = [RQLConstraint('E concerns P, S version_of P')] cardinality = '?*' class ecrit_par_2(RelationDefinition): diff -r 84738d495ffd -r 793377697c81 server/test/data/sources_postgres --- a/server/test/data/sources_postgres Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,14 +0,0 @@ -[system] - -db-driver = postgres -db-host = localhost -db-port = 5433 -adapter = native -db-name = cw_fti_test -db-encoding = UTF-8 -db-user = syt -db-password = syt - -[admin] -login = admin -password = gingkow diff -r 84738d495ffd -r 793377697c81 server/test/unittest_checkintegrity.py --- a/server/test/unittest_checkintegrity.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/unittest_checkintegrity.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
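The migratedapp schema change above adds a vocabulary constraint to evaluee. As a reminder of the semantics (assuming the usual import from cubicweb.schema), a vocabulary constraint only narrows the candidate values proposed when editing and, unlike RQLConstraint, is not enforced when the relation is actually written:

    from yams.buildobjs import RelationDefinition
    from cubicweb.schema import RQLVocabularyConstraint

    class evaluee(RelationDefinition):
        subject = ('Personne', 'CWUser', 'Societe')
        object = 'Note'
        # filters the choices offered in forms; writes are not rejected
        constraints = [RQLVocabularyConstraint('S owned_by U')]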
@@ -25,40 +25,40 @@ from cubicweb.server.checkintegrity import check, reindex_entities class CheckIntegrityTC(TestCase): + def setUp(self): handler = get_test_db_handler(TestServerConfiguration(apphome=self.datadir)) handler.build_db_cache() - self.repo, self.cnx = handler.get_repo_and_cnx() - session = self.repo._get_session(self.cnx.sessionid, setcnxset=True) - self.session = session - self.execute = session.execute + self.repo, _cnx = handler.get_repo_and_cnx() sys.stderr = sys.stdout = StringIO() def tearDown(self): sys.stderr = sys.__stderr__ sys.stdout = sys.__stdout__ - self.cnx.close() self.repo.shutdown() def test_checks(self): - check(self.repo, self.cnx, ('entities', 'relations', 'text_index', 'metadata'), - reindex=False, fix=True, withpb=False) + with self.repo.internal_cnx() as cnx: + check(self.repo, cnx, ('entities', 'relations', 'text_index', 'metadata'), + reindex=False, fix=True, withpb=False) def test_reindex_all(self): - self.execute('INSERT Personne X: X nom "toto", X prenom "tutu"') - self.session.commit(False) - self.assertTrue(self.execute('Any X WHERE X has_text "tutu"')) - reindex_entities(self.repo.schema, self.session, withpb=False) - self.assertTrue(self.execute('Any X WHERE X has_text "tutu"')) + with self.repo.internal_cnx() as cnx: + cnx.execute('INSERT Personne X: X nom "toto", X prenom "tutu"') + cnx.commit() + self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"')) + reindex_entities(self.repo.schema, cnx, withpb=False) + self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"')) def test_reindex_etype(self): - self.execute('INSERT Personne X: X nom "toto", X prenom "tutu"') - self.execute('INSERT Affaire X: X ref "toto"') - self.session.commit(False) - reindex_entities(self.repo.schema, self.session, withpb=False, - etypes=('Personne',)) - self.assertTrue(self.execute('Any X WHERE X has_text "tutu"')) - self.assertTrue(self.execute('Any X WHERE X has_text "toto"')) + with self.repo.internal_cnx() as cnx: + cnx.execute('INSERT Personne X: X nom "toto", X prenom "tutu"') + cnx.execute('INSERT Affaire X: X ref "toto"') + cnx.commit() + reindex_entities(self.repo.schema, cnx, withpb=False, + etypes=('Personne',)) + self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"')) + self.assertTrue(cnx.execute('Any X WHERE X has_text "toto"')) if __name__ == '__main__': unittest_main() diff -r 84738d495ffd -r 793377697c81 server/test/unittest_datafeed.py --- a/server/test/unittest_datafeed.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/unittest_datafeed.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2011-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2011-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
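The checkintegrity rewrite above sets the template for connection handling used throughout these tests: no session object kept on the test case any more, but a short-lived connection per test. A minimal sketch, assuming repo was built through devtools' get_test_db_handler as in setUp:

    with repo.internal_cnx() as cnx:
        # repository-side connection with full rights; note that commit()
        # no longer takes the old free_cnxset argument
        cnx.execute('INSERT Personne X: X nom "toto", X prenom "tutu"')
        cnx.commit()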
@@ -24,14 +24,15 @@ class DataFeedTC(CubicWebTC): def setup_database(self): - self.request().create_entity('CWSource', name=u'myfeed', type=u'datafeed', - parser=u'testparser', url=u'ignored', - config=u'synchronization-interval=1min') + with self.admin_access.repo_cnx() as cnx: + cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed', + parser=u'testparser', url=u'ignored', + config=u'synchronization-interval=1min') + cnx.commit() def test(self): self.assertIn('myfeed', self.repo.sources_by_uri) dfsource = self.repo.sources_by_uri['myfeed'] - self.assertNotIn(dfsource, self.repo.sources) self.assertEqual(dfsource.latest_retrieval, None) self.assertEqual(dfsource.synchro_interval, timedelta(seconds=60)) self.assertFalse(dfsource.fresh()) @@ -49,77 +50,74 @@ entity.cw_edited.update(sourceparams['item']) with self.temporary_appobjects(AParser): - session = self.repo.internal_session() - stats = dfsource.pull_data(session, force=True) - self.commit() - # test import stats - self.assertEqual(sorted(stats), ['checked', 'created', 'updated']) - self.assertEqual(len(stats['created']), 1) - entity = self.execute('Card X').get_entity(0, 0) - self.assertIn(entity.eid, stats['created']) - self.assertEqual(stats['updated'], set()) - # test imported entities - self.assertEqual(entity.title, 'cubicweb.org') - self.assertEqual(entity.content, 'the cw web site') + with self.repo.internal_cnx() as cnx: + stats = dfsource.pull_data(cnx, force=True) + cnx.commit() + # test import stats + self.assertEqual(sorted(stats), ['checked', 'created', 'updated']) + self.assertEqual(len(stats['created']), 1) + entity = cnx.execute('Card X').get_entity(0, 0) + self.assertIn(entity.eid, stats['created']) + self.assertEqual(stats['updated'], set()) + # test imported entities + self.assertEqual(entity.title, 'cubicweb.org') + self.assertEqual(entity.content, 'the cw web site') + self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/') + self.assertEqual(entity.cw_source[0].name, 'myfeed') + self.assertEqual(entity.cw_metainformation(), + {'type': 'Card', + 'source': {'uri': 'myfeed', 'type': 'datafeed', 'use-cwuri-as-url': True}, + 'extid': 'http://www.cubicweb.org/'} + ) + self.assertEqual(entity.absolute_url(), 'http://www.cubicweb.org/') + # test repo cache keys + self.assertEqual(self.repo._type_source_cache[entity.eid], + ('Card', 'http://www.cubicweb.org/', 'myfeed')) + self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'], + entity.eid) + # test repull + stats = dfsource.pull_data(cnx, force=True) + self.assertEqual(stats['created'], set()) + self.assertEqual(stats['updated'], set((entity.eid,))) + # test repull with caches reset + self.repo._type_source_cache.clear() + self.repo._extid_cache.clear() + stats = dfsource.pull_data(cnx, force=True) + self.assertEqual(stats['created'], set()) + self.assertEqual(stats['updated'], set((entity.eid,))) + self.assertEqual(self.repo._type_source_cache[entity.eid], + ('Card', 'http://www.cubicweb.org/', 'myfeed')) + self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'], + entity.eid) + + self.assertEqual(dfsource.source_cwuris(cnx), + {'http://www.cubicweb.org/': (entity.eid, 'Card')} + ) + self.assertTrue(dfsource.latest_retrieval) + self.assertTrue(dfsource.fresh()) + + # test_rename_source + with self.admin_access.repo_cnx() as cnx: + cnx.execute('SET S name "myrenamedfeed" WHERE S is CWSource, S name "myfeed"') + cnx.commit() + entity = cnx.execute('Card X').get_entity(0, 0) self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/')
- self.assertEqual(entity.cw_source[0].name, 'myfeed') + self.assertEqual(entity.cw_source[0].name, 'myrenamedfeed') self.assertEqual(entity.cw_metainformation(), {'type': 'Card', - 'source': {'uri': 'myfeed', 'type': 'datafeed', 'use-cwuri-as-url': True}, + 'source': {'uri': 'myrenamedfeed', 'type': 'datafeed', 'use-cwuri-as-url': True}, 'extid': 'http://www.cubicweb.org/'} ) - self.assertEqual(entity.absolute_url(), 'http://www.cubicweb.org/') - # test repo cache keys self.assertEqual(self.repo._type_source_cache[entity.eid], - ('Card', 'system', 'http://www.cubicweb.org/', 'myfeed')) - self.assertEqual(self.repo._extid_cache[('http://www.cubicweb.org/', 'system')], - entity.eid) - # test repull - session.set_cnxset() - stats = dfsource.pull_data(session, force=True) - self.assertEqual(stats['created'], set()) - self.assertEqual(stats['updated'], set((entity.eid,))) - # test repull with caches reseted - self.repo._type_source_cache.clear() - self.repo._extid_cache.clear() - session.set_cnxset() - stats = dfsource.pull_data(session, force=True) - self.assertEqual(stats['created'], set()) - self.assertEqual(stats['updated'], set((entity.eid,))) - self.assertEqual(self.repo._type_source_cache[entity.eid], - ('Card', 'system', 'http://www.cubicweb.org/', 'myfeed')) - self.assertEqual(self.repo._extid_cache[('http://www.cubicweb.org/', 'system')], + ('Card', 'http://www.cubicweb.org/', 'myrenamedfeed')) + self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'], entity.eid) - self.assertEqual(dfsource.source_cwuris(self.session), - {'http://www.cubicweb.org/': (entity.eid, 'Card')} - ) - self.assertTrue(dfsource.latest_retrieval) - self.assertTrue(dfsource.fresh()) - - # test_rename_source - req = self.request() - req.execute('SET S name "myrenamedfeed" WHERE S is CWSource, S name "myfeed"') - self.commit() - entity = self.execute('Card X').get_entity(0, 0) - self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/') - self.assertEqual(entity.cw_source[0].name, 'myrenamedfeed') - self.assertEqual(entity.cw_metainformation(), - {'type': 'Card', - 'source': {'uri': 'myrenamedfeed', 'type': 'datafeed', 'use-cwuri-as-url': True}, - 'extid': 'http://www.cubicweb.org/'} - ) - self.assertEqual(self.repo._type_source_cache[entity.eid], - ('Card', 'system', 'http://www.cubicweb.org/', 'myrenamedfeed')) - self.assertEqual(self.repo._extid_cache[('http://www.cubicweb.org/', 'system')], - entity.eid) - - # test_delete_source - req = self.request() - req.execute('DELETE CWSource S WHERE S name "myrenamedfeed"') - self.commit() - self.assertFalse(self.execute('Card X WHERE X title "cubicweb.org"')) - self.assertFalse(self.execute('Any X WHERE X has_text "cubicweb.org"')) + # test_delete_source + cnx.execute('DELETE CWSource S WHERE S name "myrenamedfeed"') + cnx.commit() + self.assertFalse(cnx.execute('Card X WHERE X title "cubicweb.org"')) + self.assertFalse(cnx.execute('Any X WHERE X has_text "cubicweb.org"')) if __name__ == '__main__': from logilab.common.testlib import unittest_main diff -r 84738d495ffd -r 793377697c81 server/test/unittest_hook.py --- a/server/test/unittest_hook.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/unittest_hook.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
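The datafeed assertions above also document how the repository identity caches were reshaped once entities could only live in the system source; schematically, with extid standing for the entity's URL in this test:

    # 3.18.x: keys and values were qualified by the source uri
    repo._type_source_cache[eid]           # ('Card', 'system', extid, 'myfeed')
    repo._extid_cache[(extid, 'system')]   # eid
    # 3.19.x: the system source is implicit
    repo._type_source_cache[eid]           # ('Card', extid, 'myfeed')
    repo._extid_cache[extid]               # eid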
@@ -25,51 +25,35 @@ from cubicweb.server import hook from cubicweb.hooks import integrity, syncschema -def clean_session_ops(func): - def wrapper(self, *args, **kwargs): - try: - return func(self, *args, **kwargs) - finally: - self.session.pending_operations[:] = [] - return wrapper - class OperationsTC(CubicWebTC): def setUp(self): CubicWebTC.setUp(self) self.hm = self.repo.hm - @clean_session_ops def test_late_operation(self): - session = self.session - l1 = hook.LateOperation(session) - l2 = hook.LateOperation(session) - l3 = hook.Operation(session) - self.assertEqual(session.pending_operations, [l3, l1, l2]) + with self.admin_access.repo_cnx() as cnx: + l1 = hook.LateOperation(cnx) + l2 = hook.LateOperation(cnx) + l3 = hook.Operation(cnx) + self.assertEqual(cnx.pending_operations, [l3, l1, l2]) - @clean_session_ops def test_single_last_operation(self): - session = self.session - l0 = hook.SingleLastOperation(session) - l1 = hook.LateOperation(session) - l2 = hook.LateOperation(session) - l3 = hook.Operation(session) - self.assertEqual(session.pending_operations, [l3, l1, l2, l0]) - l4 = hook.SingleLastOperation(session) - self.assertEqual(session.pending_operations, [l3, l1, l2, l4]) + with self.admin_access.repo_cnx() as cnx: + l0 = hook.SingleLastOperation(cnx) + l1 = hook.LateOperation(cnx) + l2 = hook.LateOperation(cnx) + l3 = hook.Operation(cnx) + self.assertEqual(cnx.pending_operations, [l3, l1, l2, l0]) + l4 = hook.SingleLastOperation(cnx) + self.assertEqual(cnx.pending_operations, [l3, l1, l2, l4]) - @clean_session_ops def test_global_operation_order(self): - session = self.session - op1 = integrity._DelayedDeleteOp(session) - op2 = syncschema.RDefDelOp(session) - # equivalent operation generated by op2 but replace it here by op3 so we - # can check the result... - op3 = syncschema.MemSchemaNotifyChanges(session) - op4 = integrity._DelayedDeleteOp(session) - op5 = integrity._CheckORelationOp(session) - self.assertEqual(session.pending_operations, [op1, op2, op4, op5, op3]) - + with self.admin_access.repo_cnx() as cnx: + op1 = syncschema.RDefDelOp(cnx) + op2 = integrity._CheckORelationOp(cnx) + op3 = syncschema.MemSchemaNotifyChanges(cnx) + self.assertEqual([op1, op2, op3], cnx.pending_operations) class HookCalled(Exception): pass @@ -144,9 +128,10 @@ def test_session_open_close(self): import hooks # cubicweb/server/test/data/hooks.py - cnx = self.login('anon') - self.assertEqual(hooks.CALLED_EVENTS['session_open'], 'anon') - cnx.close() + anonaccess = self.new_access('anon') + with anonaccess.repo_cnx() as cnx: + self.assertEqual(hooks.CALLED_EVENTS['session_open'], 'anon') + anonaccess.close() self.assertEqual(hooks.CALLED_EVENTS['session_close'], 'anon') diff -r 84738d495ffd -r 793377697c81 server/test/unittest_ldapsource.py --- a/server/test/unittest_ldapsource.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/unittest_ldapsource.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
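The rewritten operation tests above pin down the queueing contract of pending_operations: plain Operations run in insertion order, LateOperations sort after all of them, and a SingleLastOperation stays last and is replaced if another one is added. Condensed from the tests themselves (using the hook module imported at the top of the file):

    with self.admin_access.repo_cnx() as cnx:
        late = hook.LateOperation(cnx)        # sorts after plain operations
        plain = hook.Operation(cnx)           # runs first
        last = hook.SingleLastOperation(cnx)  # always last; re-adding replaces it
        self.assertEqual(cnx.pending_operations, [plain, late, last])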
@@ -31,7 +31,6 @@ from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools.repotest import RQLGeneratorTC from cubicweb.devtools.httptest import get_available_port -from cubicweb.devtools import get_test_db_handler CONFIG_LDAPFEED = u''' @@ -123,32 +122,29 @@ pass @classmethod - def pre_setup_database(cls, session, config): - session.create_entity('CWSource', name=u'ldap', type=u'ldapfeed', parser=u'ldapfeed', - url=URL, config=CONFIG_LDAPFEED) + def pre_setup_database(cls, cnx, config): + cnx.create_entity('CWSource', name=u'ldap', type=u'ldapfeed', parser=u'ldapfeed', + url=URL, config=CONFIG_LDAPFEED) - session.commit() - return cls._pull(session) + cnx.commit() + return cls.pull(cnx) @classmethod - def _pull(cls, session): - with session.repo.internal_session() as isession: - lfsource = isession.repo.sources_by_uri['ldap'] - stats = lfsource.pull_data(isession, force=True, raise_on_error=True) - isession.commit() - return stats - - def pull(self): - return self._pull(self.session) + def pull(self, cnx): + lfsource = cnx.repo.sources_by_uri['ldap'] + stats = lfsource.pull_data(cnx, force=True, raise_on_error=True) + cnx.commit() + return stats def setup_database(self): - with self.session.repo.internal_session(safe=True) as session: - session.execute('DELETE Any E WHERE E cw_source S, S name "ldap"') - session.execute('SET S config %(conf)s, S url %(url)s ' - 'WHERE S is CWSource, S name "ldap"', - {"conf": CONFIG_LDAPFEED, 'url': URL} ) - session.commit() - self.pull() + with self.admin_access.repo_cnx() as cnx: + cnx.execute('DELETE Any E WHERE E cw_source S, S name "ldap"') + cnx.execute('SET S config %(conf)s, S url %(url)s ' + 'WHERE S is CWSource, S name "ldap"', + {"conf": CONFIG_LDAPFEED, 'url': URL} ) + cnx.commit() + with self.repo.internal_cnx() as cnx: + self.pull(cnx) def add_ldap_entry(self, dn, mods): """ @@ -201,16 +197,16 @@ """ def test_wrong_group(self): - with self.session.repo.internal_session(safe=True) as session: - source = self.session.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0) + with self.admin_access.repo_cnx() as cnx: + source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0) config = source.repo_source.check_config(source) # inject a bogus group here, along with at least a valid one config['user-default-group'] = ('thisgroupdoesnotexists','users') source.repo_source.update_config(source, config) - session.commit(free_cnxset=False) + cnx.commit() # here we emitted an error log entry - stats = source.repo_source.pull_data(session, force=True, raise_on_error=True) - session.commit() + stats = source.repo_source.pull_data(cnx, force=True, raise_on_error=True) + cnx.commit() @@ -225,119 +221,131 @@ def test_authenticate(self): source = self.repo.sources_by_uri['ldap'] - self.session.set_cnxset() - # ensure we won't be logged against - self.assertRaises(AuthenticationError, - source.authenticate, self.session, 'toto', 'toto') - self.assertTrue(source.authenticate(self.session, 'syt', 'syt')) - self.assertTrue(self.repo.connect('syt', password='syt')) + with self.admin_access.repo_cnx() as cnx: + # ensure we cannot log in with bad credentials + self.assertRaises(AuthenticationError, + source.authenticate, cnx, 'toto', 'toto') + self.assertTrue(source.authenticate(cnx, 'syt', 'syt')) + sessionid = self.repo.connect('syt', password='syt') + self.assertTrue(sessionid) + self.repo.close(sessionid) def test_base(self): - # check a known one - rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'}) - e =
rset.get_entity(0, 0) - self.assertEqual(e.login, 'syt') - e.complete() - self.assertMetadata(e) - self.assertEqual(e.firstname, None) - self.assertEqual(e.surname, None) - self.assertTrue('users' in [g.name for g in e.in_group]) - self.assertEqual(e.owned_by[0].login, 'syt') - self.assertEqual(e.created_by, ()) - addresses = [pe.address for pe in e.use_email] - addresses.sort() - self.assertEqual(['sylvain.thenault@logilab.fr', 'syt@logilab.fr'], - addresses) - self.assertIn(e.primary_email[0].address, ['sylvain.thenault@logilab.fr', - 'syt@logilab.fr']) - # email content should be indexed on the user - rset = self.sexecute('CWUser X WHERE X has_text "thenault"') - self.assertEqual(rset.rows, [[e.eid]]) + with self.admin_access.repo_cnx() as cnx: + # check a known one + rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'}) + e = rset.get_entity(0, 0) + self.assertEqual(e.login, 'syt') + e.complete() + self.assertMetadata(e) + self.assertEqual(e.firstname, None) + self.assertEqual(e.surname, None) + self.assertIn('users', set(g.name for g in e.in_group)) + self.assertEqual(e.owned_by[0].login, 'syt') + self.assertEqual(e.created_by, ()) + addresses = [pe.address for pe in e.use_email] + addresses.sort() + self.assertEqual(['sylvain.thenault@logilab.fr', 'syt@logilab.fr'], + addresses) + self.assertIn(e.primary_email[0].address, ['sylvain.thenault@logilab.fr', + 'syt@logilab.fr']) + # email content should be indexed on the user + rset = cnx.execute('CWUser X WHERE X has_text "thenault"') + self.assertEqual(rset.rows, [[e.eid]]) def test_copy_to_system_source(self): "make sure we can 'convert' an LDAP user into a system one" - source = self.repo.sources_by_uri['ldap'] - eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0] - self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid}) - self.commit() - source.reset_caches() - rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'}) - self.assertEqual(len(rset), 1) - e = rset.get_entity(0, 0) - self.assertEqual(e.eid, eid) - self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', - 'uri': u'system', - 'use-cwuri-as-url': False}, - 'type': 'CWUser', - 'extid': None}) - self.assertEqual(e.cw_source[0].name, 'system') - self.assertTrue(e.creation_date) - self.assertTrue(e.modification_date) - source.pull_data(self.session) - rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'}) - self.assertEqual(len(rset), 1) - self.assertTrue(self.repo.system_source.authenticate( - self.session, 'syt', password='syt')) - # make sure the pull from ldap have not "reverted" user as a ldap-feed user - self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', - 'uri': u'system', - 'use-cwuri-as-url': False}, - 'type': 'CWUser', - 'extid': None}) - # and that the password stored in the system source is not empty or so - user = self.execute('CWUser U WHERE U login "syt"').get_entity(0, 0) - user.cw_clear_all_caches() - pwd = self.session.system_sql("SELECT cw_upassword FROM cw_cwuser WHERE cw_login='syt';").fetchall()[0][0] - self.assertIsNotNone(pwd) - self.assertTrue(str(pwd)) + with self.admin_access.repo_cnx() as cnx: + source = self.repo.sources_by_uri['ldap'] + eid = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0] + cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid}) + cnx.commit() + source.reset_caches() + rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 
'syt'}) + self.assertEqual(len(rset), 1) + e = rset.get_entity(0, 0) + self.assertEqual(e.eid, eid) + self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', + 'uri': u'system', + 'use-cwuri-as-url': False}, + 'type': 'CWUser', + 'extid': None}) + self.assertEqual(e.cw_source[0].name, 'system') + self.assertTrue(e.creation_date) + self.assertTrue(e.modification_date) + source.pull_data(cnx) + rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'}) + self.assertEqual(len(rset), 1) + self.assertTrue(self.repo.system_source.authenticate(cnx, 'syt', password='syt')) + # make sure the pull from ldap has not "reverted" the user to a ldap-feed user + self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', + 'uri': u'system', + 'use-cwuri-as-url': False}, + 'type': 'CWUser', + 'extid': None}) + # and that the password stored in the system source is not empty + user = cnx.execute('CWUser U WHERE U login "syt"').get_entity(0, 0) + user.cw_clear_all_caches() + pwd = cnx.system_sql("SELECT cw_upassword FROM cw_cwuser WHERE cw_login='syt';").fetchall()[0][0] + self.assertIsNotNone(pwd) + self.assertTrue(str(pwd)) class LDAPFeedUserDeletionTC(LDAPFeedTestBase): """ A testcase for situations where users are deleted from or - unavailabe in the LDAP database. + unavailable in the LDAP database. """ + def test_a_filter_inactivate(self): """ filtered out people should be deactivated, unable to authenticate """ - source = self.session.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0) - config = source.repo_source.check_config(source) - # filter with adim's phone number - config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109') - source.repo_source.update_config(source, config) - self.commit() - self.pull() + with self.admin_access.repo_cnx() as cnx: + source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0) + config = source.repo_source.check_config(source) + # filter with adim's phone number + config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109') + source.repo_source.update_config(source, config) + cnx.commit() + with self.repo.internal_cnx() as cnx: + self.pull(cnx) self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt') - self.assertEqual(self.execute('Any N WHERE U login "syt", ' - 'U in_state S, S name N').rows[0][0], - 'deactivated') - self.assertEqual(self.execute('Any N WHERE U login "adim", ' - 'U in_state S, S name N').rows[0][0], - 'activated') - # unfilter, syt should be activated again - config['user-filter'] = u'' - source.repo_source.update_config(source, config) - self.commit() - self.pull() - self.assertEqual(self.execute('Any N WHERE U login "syt", ' - 'U in_state S, S name N').rows[0][0], - 'activated') - self.assertEqual(self.execute('Any N WHERE U login "adim", ' - 'U in_state S, S name N').rows[0][0], - 'activated') + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' + 'U in_state S, S name N').rows[0][0], + 'deactivated') + self.assertEqual(cnx.execute('Any N WHERE U login "adim", ' + 'U in_state S, S name N').rows[0][0], + 'activated') + # unfilter, syt should be activated again + config['user-filter'] = u'' + source.repo_source.update_config(source, config) + cnx.commit() + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' + 'U in_state S, S name N').rows[0][0], + 'activated') +
self.assertEqual(cnx.execute('Any N WHERE U login "adim", ' + 'U in_state S, S name N').rows[0][0], + 'activated') def test_delete(self): """ delete syt, pull, check deactivation, repull, read syt, pull, check activation """ self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test') - self.pull() + with self.repo.internal_cnx() as cnx: + self.pull(cnx) self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt') - self.assertEqual(self.execute('Any N WHERE U login "syt", ' - 'U in_state S, S name N').rows[0][0], - 'deactivated') - # check that it doesn't choke - self.pull() + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' + 'U in_state S, S name N').rows[0][0], + 'deactivated') + with self.repo.internal_cnx() as cnx: + # check that it doesn't choke + self.pull(cnx) # reinsert syt self.add_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test', { 'objectClass': ['OpenLDAPperson','posixAccount','top','shadowAccount'], @@ -354,31 +362,38 @@ 'gecos': 'Sylvain Thenault', 'mail': ['sylvain.thenault@logilab.fr','syt@logilab.fr'], 'userPassword': 'syt', - }) - self.pull() - self.assertEqual(self.execute('Any N WHERE U login "syt", ' - 'U in_state S, S name N').rows[0][0], - 'activated') + }) + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + with self.admin_access.repo_cnx() as cnx: + self.assertEqual(cnx.execute('Any N WHERE U login "syt", ' + 'U in_state S, S name N').rows[0][0], + 'activated') def test_reactivate_deleted(self): # test reactivating BY HAND the user isn't enough to # authenticate, as the native source refuse to authenticate # user from other sources self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test') - self.pull() - # reactivate user (which source is still ldap-feed) - user = self.execute('CWUser U WHERE U login "syt"').get_entity(0, 0) - user.cw_adapt_to('IWorkflowable').fire_transition('activate') - self.commit() - with self.assertRaises(AuthenticationError): - self.repo.connect('syt', password='syt') + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + with self.admin_access.repo_cnx() as cnx: + # reactivate user (which source is still ldap-feed) + user = cnx.execute('CWUser U WHERE U login "syt"').get_entity(0, 0) + user.cw_adapt_to('IWorkflowable').fire_transition('activate') + cnx.commit() + with self.assertRaises(AuthenticationError): + self.repo.connect('syt', password='syt') - # ok now let's try to make it a system user - self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': user.eid}) - self.commit() + # ok now let's try to make it a system user + cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': user.eid}) + cnx.commit() # and that we can now authenticate again self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='toto') - self.assertTrue(self.repo.connect('syt', password='syt')) + sessionid = self.repo.connect('syt', password='syt') + self.assertTrue(sessionid) + self.repo.close(sessionid) + class LDAPFeedGroupTC(LDAPFeedTestBase): """ @@ -386,44 +401,51 @@ """ def test_groups_exist(self): - rset = self.sexecute('CWGroup X WHERE X name "dir"') - self.assertEqual(len(rset), 1) + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('CWGroup X WHERE X name "dir"') + self.assertEqual(len(rset), 1) - rset = self.sexecute('CWGroup X WHERE X cw_source S, S name "ldap"') - self.assertEqual(len(rset), 2) + rset = cnx.execute('CWGroup X WHERE X cw_source S, S name "ldap"') + 
self.assertEqual(len(rset), 2) def test_group_deleted(self): - rset = self.sexecute('CWGroup X WHERE X name "dir"') - self.assertEqual(len(rset), 1) + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('CWGroup X WHERE X name "dir"') + self.assertEqual(len(rset), 1) def test_in_group(self): - rset = self.sexecute('CWGroup X WHERE X name %(name)s', {'name': 'dir'}) - dirgroup = rset.get_entity(0, 0) - self.assertEqual(set(['syt', 'adim']), - set([u.login for u in dirgroup.reverse_in_group])) - rset = self.sexecute('CWGroup X WHERE X name %(name)s', {'name': 'logilab'}) - logilabgroup = rset.get_entity(0, 0) - self.assertEqual(set(['adim']), - set([u.login for u in logilabgroup.reverse_in_group])) + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('CWGroup X WHERE X name %(name)s', {'name': 'dir'}) + dirgroup = rset.get_entity(0, 0) + self.assertEqual(set(['syt', 'adim']), + set([u.login for u in dirgroup.reverse_in_group])) + rset = cnx.execute('CWGroup X WHERE X name %(name)s', {'name': 'logilab'}) + logilabgroup = rset.get_entity(0, 0) + self.assertEqual(set(['adim']), + set([u.login for u in logilabgroup.reverse_in_group])) def test_group_member_added(self): - self.pull() - rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L', - {'name': 'logilab'}) - self.assertEqual(len(rset), 1) - self.assertEqual(rset[0][0], 'adim') + with self.repo.internal_cnx() as cnx: + self.pull(cnx) + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', + {'name': 'logilab'}) + self.assertEqual(len(rset), 1) + self.assertEqual(rset[0][0], 'adim') try: self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test', - {('add', 'memberUid'): ['syt']}) + {('add', 'memberUid'): ['syt']}) time.sleep(1.1) # timestamps precision is 1s - self.pull() + with self.repo.internal_cnx() as cnx: + self.pull(cnx) - rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L', - {'name': 'logilab'}) - self.assertEqual(len(rset), 2) - members = set([u[0] for u in rset]) - self.assertEqual(set(['adim', 'syt']), members) + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', + {'name': 'logilab'}) + self.assertEqual(len(rset), 2) + members = set([u[0] for u in rset]) + self.assertEqual(set(['adim', 'syt']), members) finally: # back to normal ldap setup @@ -431,21 +453,25 @@ self.setUpClass() def test_group_member_deleted(self): - self.pull() # ensure we are sync'ed - rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L', - {'name': 'logilab'}) - self.assertEqual(len(rset), 1) - self.assertEqual(rset[0][0], 'adim') + with self.repo.internal_cnx() as cnx: + self.pull(cnx) # ensure we are sync'ed + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', + {'name': 'logilab'}) + self.assertEqual(len(rset), 1) + self.assertEqual(rset[0][0], 'adim') try: self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test', {('delete', 'memberUid'): ['adim']}) time.sleep(1.1) # timestamps precision is 1s - self.pull() + with self.repo.internal_cnx() as cnx: + self.pull(cnx) - rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L', - {'name': 'logilab'}) - self.assertEqual(len(rset), 0) + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', + {'name': 'logilab'}) + 
self.assertEqual(len(rset), 0) finally: # back to normal ldap setup self.tearDownClass() diff -r 84738d495ffd -r 793377697c81 server/test/unittest_migractions.py --- a/server/test/unittest_migractions.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/unittest_migractions.py Wed Sep 24 18:04:30 2014 +0200 @@ -19,16 +19,16 @@ from datetime import date from os.path import join +from contextlib import contextmanager -from logilab.common.testlib import TestCase, unittest_main, Tags, tag +from logilab.common.testlib import unittest_main, Tags, tag from yams.constraints import UniqueConstraint from cubicweb import ConfigurationError, ValidationError from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.schema import CubicWebSchemaLoader from cubicweb.server.sqlutils import SQL_PREFIX -from cubicweb.server.migractions import * +from cubicweb.server.migractions import ServerMigrationHelper import cubicweb.devtools @@ -45,489 +45,506 @@ tags = CubicWebTC.tags | Tags(('server', 'migration', 'migractions')) - @classmethod - def _init_repo(cls): - super(MigrationCommandsTC, cls)._init_repo() + def _init_repo(self): + super(MigrationCommandsTC, self)._init_repo() # we have to read schema from the database to get eid for schema entities - cls.repo.set_schema(cls.repo.deserialize_schema(), resetvreg=False) + self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False) # hack to read the schema from data/migrschema - config = cls.config + config = self.config config.appid = join('data', 'migratedapp') - config._apphome = cls.datapath('migratedapp') + config._apphome = self.datapath('migratedapp') global migrschema migrschema = config.load_schema() config.appid = 'data' - config._apphome = cls.datadir + config._apphome = self.datadir assert 'Folder' in migrschema def setUp(self): CubicWebTC.setUp(self) - self.mh = ServerMigrationHelper(self.repo.config, migrschema, - repo=self.repo, cnx=self.cnx, - interactive=False) - assert self.cnx is self.mh._cnx - assert self.session is self.mh.session, (self.session.id, self.mh.session.id) def tearDown(self): CubicWebTC.tearDown(self) self.repo.vreg['etypes'].clear_caches() + @contextmanager + def mh(self): + with self.admin_access.client_cnx() as cnx: + yield cnx, ServerMigrationHelper(self.repo.config, migrschema, + repo=self.repo, cnx=cnx, + interactive=False) + def test_add_attribute_bool(self): - self.assertFalse('yesno' in self.schema) - self.session.create_entity('Note') - self.commit() - self.mh.cmd_add_attribute('Note', 'yesno') - self.assertTrue('yesno' in self.schema) - self.assertEqual(self.schema['yesno'].subjects(), ('Note',)) - self.assertEqual(self.schema['yesno'].objects(), ('Boolean',)) - self.assertEqual(self.schema['Note'].default('yesno'), False) - # test default value set on existing entities - note = self.session.execute('Note X').get_entity(0, 0) - self.assertEqual(note.yesno, False) - # test default value set for next entities - self.assertEqual(self.session.create_entity('Note').yesno, False) - self.mh.rollback() + with self.mh() as (cnx, mh): + self.assertNotIn('yesno', self.schema) + cnx.create_entity('Note') + cnx.commit() + mh.cmd_add_attribute('Note', 'yesno') + self.assertIn('yesno', self.schema) + self.assertEqual(self.schema['yesno'].subjects(), ('Note',)) + self.assertEqual(self.schema['yesno'].objects(), ('Boolean',)) + self.assertEqual(self.schema['Note'].default('yesno'), False) + # test default value set on existing entities + note = cnx.execute('Note X').get_entity(0, 0) + self.assertEqual(note.yesno, False) 
+ # test default value set for next entities + self.assertEqual(cnx.create_entity('Note').yesno, False) def test_add_attribute_int(self): - self.assertFalse('whatever' in self.schema) - self.session.create_entity('Note') - self.session.commit(free_cnxset=False) - orderdict = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' + with self.mh() as (cnx, mh): + self.assertNotIn('whatever', self.schema) + cnx.create_entity('Note') + cnx.commit() + orderdict = dict(mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' + 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) + mh.cmd_add_attribute('Note', 'whatever') + self.assertIn('whatever', self.schema) + self.assertEqual(self.schema['whatever'].subjects(), ('Note',)) + self.assertEqual(self.schema['whatever'].objects(), ('Int',)) + self.assertEqual(self.schema['Note'].default('whatever'), 0) + # test default value set on existing entities + note = cnx.execute('Note X').get_entity(0, 0) + self.assertIsInstance(note.whatever, int) + self.assertEqual(note.whatever, 0) + # test default value set for next entities + self.assertEqual(cnx.create_entity('Note').whatever, 0) + # test attribute order + orderdict2 = dict(mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) - self.mh.cmd_add_attribute('Note', 'whatever') - self.assertTrue('whatever' in self.schema) - self.assertEqual(self.schema['whatever'].subjects(), ('Note',)) - self.assertEqual(self.schema['whatever'].objects(), ('Int',)) - self.assertEqual(self.schema['Note'].default('whatever'), 0) - # test default value set on existing entities - note = self.session.execute('Note X').get_entity(0, 0) - self.assertIsInstance(note.whatever, int) - self.assertEqual(note.whatever, 0) - # test default value set for next entities - self.assertEqual(self.session.create_entity('Note').whatever, 0) - # test attribute order - orderdict2 = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' - 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) - whateverorder = migrschema['whatever'].rdef('Note', 'Int').order - for k, v in orderdict.iteritems(): - if v >= whateverorder: - orderdict[k] = v+1 - orderdict['whatever'] = whateverorder - self.assertDictEqual(orderdict, orderdict2) - #self.assertEqual([r.type for r in self.schema['Note'].ordered_relations()], - # ['modification_date', 'creation_date', 'owned_by', - # 'eid', 'ecrit_par', 'inline1', 'date', 'type', - # 'whatever', 'date', 'in_basket']) - # NB: commit instead of rollback make following test fail with py2.5 - # this sounds like a pysqlite/2.5 bug (the same eid is affected to - # two different entities) - self.mh.rollback() + whateverorder = migrschema['whatever'].rdef('Note', 'Int').order + for k, v in orderdict.iteritems(): + if v >= whateverorder: + orderdict[k] = v+1 + orderdict['whatever'] = whateverorder + self.assertDictEqual(orderdict, orderdict2) + #self.assertEqual([r.type for r in self.schema['Note'].ordered_relations()], + # ['modification_date', 'creation_date', 'owned_by', + # 'eid', 'ecrit_par', 'inline1', 'date', 'type', + # 'whatever', 'date', 'in_basket']) + # NB: commit instead of rollback makes the following test fail with py2.5 + # this sounds like a pysqlite/2.5 bug (the same eid is assigned to + # two different entities) def test_add_attribute_varchar(self): - self.assertFalse('whatever' in self.schema) - self.session.create_entity('Note') - self.session.commit(free_cnxset=False) -
self.assertFalse('shortpara' in self.schema) - self.mh.cmd_add_attribute('Note', 'shortpara') - self.assertTrue('shortpara' in self.schema) - self.assertEqual(self.schema['shortpara'].subjects(), ('Note', )) - self.assertEqual(self.schema['shortpara'].objects(), ('String', )) - # test created column is actually a varchar(64) - notesql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' and name='%sNote'" % SQL_PREFIX)[0][0] - fields = dict(x.strip().split()[:2] for x in notesql.split('(', 1)[1].rsplit(')', 1)[0].split(',')) - self.assertEqual(fields['%sshortpara' % SQL_PREFIX], 'varchar(64)') - # test default value set on existing entities - self.assertEqual(self.session.execute('Note X').get_entity(0, 0).shortpara, 'hop') - # test default value set for next entities - self.assertEqual(self.session.create_entity('Note').shortpara, 'hop') - self.mh.rollback() + with self.mh() as (cnx, mh): + self.assertNotIn('whatever', self.schema) + cnx.create_entity('Note') + cnx.commit() + self.assertNotIn('shortpara', self.schema) + mh.cmd_add_attribute('Note', 'shortpara') + self.assertIn('shortpara', self.schema) + self.assertEqual(self.schema['shortpara'].subjects(), ('Note', )) + self.assertEqual(self.schema['shortpara'].objects(), ('String', )) + # test created column is actually a varchar(64) + notesql = mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' and name='%sNote'" % SQL_PREFIX)[0][0] + fields = dict(x.strip().split()[:2] for x in notesql.split('(', 1)[1].rsplit(')', 1)[0].split(',')) + self.assertEqual(fields['%sshortpara' % SQL_PREFIX], 'varchar(64)') + # test default value set on existing entities + self.assertEqual(cnx.execute('Note X').get_entity(0, 0).shortpara, 'hop') + # test default value set for next entities + self.assertEqual(cnx.create_entity('Note').shortpara, 'hop') def test_add_datetime_with_default_value_attribute(self): - self.assertFalse('mydate' in self.schema) - self.assertFalse('oldstyledefaultdate' in self.schema) - self.assertFalse('newstyledefaultdate' in self.schema) - self.mh.cmd_add_attribute('Note', 'mydate') - self.mh.cmd_add_attribute('Note', 'oldstyledefaultdate') - self.mh.cmd_add_attribute('Note', 'newstyledefaultdate') - self.assertTrue('mydate' in self.schema) - self.assertTrue('oldstyledefaultdate' in self.schema) - self.assertTrue('newstyledefaultdate' in self.schema) - self.assertEqual(self.schema['mydate'].subjects(), ('Note', )) - self.assertEqual(self.schema['mydate'].objects(), ('Date', )) - testdate = date(2005, 12, 13) - eid1 = self.mh.rqlexec('INSERT Note N')[0][0] - eid2 = self.mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0] - d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0] - d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0] - d3 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X oldstyledefaultdate D', {'x': eid1})[0][0] - d4 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X newstyledefaultdate D', {'x': eid1})[0][0] - self.assertEqual(d1, date.today()) - self.assertEqual(d2, testdate) - myfavoritedate = date(2013, 1, 1) - self.assertEqual(d3, myfavoritedate) - self.assertEqual(d4, myfavoritedate) - self.mh.rollback() + with self.mh() as (cnx, mh): + self.assertNotIn('mydate', self.schema) + self.assertNotIn('oldstyledefaultdate', self.schema) + self.assertNotIn('newstyledefaultdate', self.schema) + mh.cmd_add_attribute('Note', 'mydate') + mh.cmd_add_attribute('Note', 'oldstyledefaultdate') + mh.cmd_add_attribute('Note', 
'newstyledefaultdate') + self.assertIn('mydate', self.schema) + self.assertIn('oldstyledefaultdate', self.schema) + self.assertIn('newstyledefaultdate', self.schema) + self.assertEqual(self.schema['mydate'].subjects(), ('Note', )) + self.assertEqual(self.schema['mydate'].objects(), ('Date', )) + testdate = date(2005, 12, 13) + eid1 = mh.rqlexec('INSERT Note N')[0][0] + eid2 = mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0] + d1 = mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0] + d2 = mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0] + d3 = mh.rqlexec('Any D WHERE X eid %(x)s, X oldstyledefaultdate D', {'x': eid1})[0][0] + d4 = mh.rqlexec('Any D WHERE X eid %(x)s, X newstyledefaultdate D', {'x': eid1})[0][0] + self.assertEqual(d1, date.today()) + self.assertEqual(d2, testdate) + myfavoritedate = date(2013, 1, 1) + self.assertEqual(d3, myfavoritedate) + self.assertEqual(d4, myfavoritedate) def test_drop_chosen_constraints_ctxmanager(self): - with self.mh.cmd_dropped_constraints('Note', 'unique_id', UniqueConstraint): - self.mh.cmd_add_attribute('Note', 'unique_id') - # make sure the maxsize constraint is not dropped - self.assertRaises(ValidationError, - self.mh.rqlexec, - 'INSERT Note N: N unique_id "xyz"') - self.mh.rollback() - # make sure the unique constraint is dropped - self.mh.rqlexec('INSERT Note N: N unique_id "x"') - self.mh.rqlexec('INSERT Note N: N unique_id "x"') - self.mh.rqlexec('DELETE Note N') - self.mh.rollback() + with self.mh() as (cnx, mh): + with mh.cmd_dropped_constraints('Note', 'unique_id', UniqueConstraint): + mh.cmd_add_attribute('Note', 'unique_id') + # make sure the maxsize constraint is not dropped + self.assertRaises(ValidationError, + mh.rqlexec, + 'INSERT Note N: N unique_id "xyz"') + mh.rollback() + # make sure the unique constraint is dropped + mh.rqlexec('INSERT Note N: N unique_id "x"') + mh.rqlexec('INSERT Note N: N unique_id "x"') + mh.rqlexec('DELETE Note N') def test_drop_required_ctxmanager(self): - with self.mh.cmd_dropped_constraints('Note', 'unique_id', cstrtype=None, - droprequired=True): - self.mh.cmd_add_attribute('Note', 'unique_id') - self.mh.rqlexec('INSERT Note N') - # make sure the required=True was restored - self.assertRaises(ValidationError, self.mh.rqlexec, 'INSERT Note N') - self.mh.rollback() + with self.mh() as (cnx, mh): + with mh.cmd_dropped_constraints('Note', 'unique_id', cstrtype=None, + droprequired=True): + mh.cmd_add_attribute('Note', 'unique_id') + mh.rqlexec('INSERT Note N') + # make sure the required=True was restored + self.assertRaises(ValidationError, mh.rqlexec, 'INSERT Note N') + mh.rollback() def test_rename_attribute(self): - self.assertFalse('civility' in self.schema) - eid1 = self.mh.rqlexec('INSERT Personne X: X nom "lui", X sexe "M"')[0][0] - eid2 = self.mh.rqlexec('INSERT Personne X: X nom "l\'autre", X sexe NULL')[0][0] - self.mh.cmd_rename_attribute('Personne', 'sexe', 'civility') - self.assertFalse('sexe' in self.schema) - self.assertTrue('civility' in self.schema) - # test data has been backported - c1 = self.mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid1)[0][0] - self.assertEqual(c1, 'M') - c2 = self.mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid2)[0][0] - self.assertEqual(c2, None) - + with self.mh() as (cnx, mh): + self.assertNotIn('civility', self.schema) + eid1 = mh.rqlexec('INSERT Personne X: X nom "lui", X sexe "M"')[0][0] + eid2 = mh.rqlexec('INSERT Personne X: X nom "l\'autre", X sexe NULL')[0][0] + 
mh.cmd_rename_attribute('Personne', 'sexe', 'civility') + self.assertNotIn('sexe', self.schema) + self.assertIn('civility', self.schema) + # test data has been backported + c1 = mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid1)[0][0] + self.assertEqual(c1, 'M') + c2 = mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid2)[0][0] + self.assertEqual(c2, None) def test_workflow_actions(self): - wf = self.mh.cmd_add_workflow(u'foo', ('Personne', 'Email'), - ensure_workflowable=False) - for etype in ('Personne', 'Email'): - s1 = self.mh.rqlexec('Any N WHERE WF workflow_of ET, ET name "%s", WF name N' % - etype)[0][0] - self.assertEqual(s1, "foo") - s1 = self.mh.rqlexec('Any N WHERE ET default_workflow WF, ET name "%s", WF name N' % - etype)[0][0] - self.assertEqual(s1, "foo") + with self.mh() as (cnx, mh): + wf = mh.cmd_add_workflow(u'foo', ('Personne', 'Email'), + ensure_workflowable=False) + for etype in ('Personne', 'Email'): + s1 = mh.rqlexec('Any N WHERE WF workflow_of ET, ET name "%s", WF name N' % + etype)[0][0] + self.assertEqual(s1, "foo") + s1 = mh.rqlexec('Any N WHERE ET default_workflow WF, ET name "%s", WF name N' % + etype)[0][0] + self.assertEqual(s1, "foo") def test_add_entity_type(self): - self.assertFalse('Folder2' in self.schema) - self.assertFalse('filed_under2' in self.schema) - self.mh.cmd_add_entity_type('Folder2') - self.assertTrue('Folder2' in self.schema) - self.assertTrue('Old' in self.schema) - self.assertTrue(self.session.execute('CWEType X WHERE X name "Folder2"')) - self.assertTrue('filed_under2' in self.schema) - self.assertTrue(self.session.execute('CWRType X WHERE X name "filed_under2"')) - self.assertEqual(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()), - ['created_by', 'creation_date', 'cw_source', 'cwuri', - 'description', 'description_format', - 'eid', - 'filed_under2', 'has_text', - 'identity', 'in_basket', 'is', 'is_instance_of', - 'modification_date', 'name', 'owned_by']) - self.assertEqual([str(rs) for rs in self.schema['Folder2'].object_relations()], - ['filed_under2', 'identity']) - # Old will be missing as it has been renamed into 'New' in the migrated - # schema while New hasn't been added here. - self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), - sorted(str(e) for e in self.schema.entities() if not e.final and e != 'Old')) - self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',)) - eschema = self.schema.eschema('Folder2') - for cstr in eschema.rdef('name').constraints: - self.assertTrue(hasattr(cstr, 'eid')) + with self.mh() as (cnx, mh): + self.assertNotIn('Folder2', self.schema) + self.assertNotIn('filed_under2', self.schema) + mh.cmd_add_entity_type('Folder2') + self.assertIn('Folder2', self.schema) + self.assertIn('Old', self.schema) + self.assertTrue(cnx.execute('CWEType X WHERE X name "Folder2"')) + self.assertIn('filed_under2', self.schema) + self.assertTrue(cnx.execute('CWRType X WHERE X name "filed_under2"')) + self.assertEqual(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()), + ['created_by', 'creation_date', 'cw_source', 'cwuri', + 'description', 'description_format', + 'eid', + 'filed_under2', 'has_text', + 'identity', 'in_basket', 'is', 'is_instance_of', + 'modification_date', 'name', 'owned_by']) + self.assertEqual([str(rs) for rs in self.schema['Folder2'].object_relations()], + ['filed_under2', 'identity']) + # Old will be missing as it has been renamed into 'New' in the migrated + # schema while New hasn't been added here. 
+ self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), + sorted(str(e) for e in self.schema.entities() if not e.final and e != 'Old')) + self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',)) + eschema = self.schema.eschema('Folder2') + for cstr in eschema.rdef('name').constraints: + self.assertTrue(hasattr(cstr, 'eid')) def test_add_drop_entity_type(self): - self.mh.cmd_add_entity_type('Folder2') - wf = self.mh.cmd_add_workflow(u'folder2 wf', 'Folder2', - ensure_workflowable=False) - todo = wf.add_state(u'todo', initial=True) - done = wf.add_state(u'done') - wf.add_transition(u'redoit', done, todo) - wf.add_transition(u'markasdone', todo, done) - self.session.commit(free_cnxset=False) - eschema = self.schema.eschema('Folder2') - self.mh.cmd_drop_entity_type('Folder2') - self.assertFalse('Folder2' in self.schema) - self.assertFalse(self.session.execute('CWEType X WHERE X name "Folder2"')) - # test automatic workflow deletion - self.assertFalse(self.session.execute('Workflow X WHERE NOT X workflow_of ET')) - self.assertFalse(self.session.execute('State X WHERE NOT X state_of WF')) - self.assertFalse(self.session.execute('Transition X WHERE NOT X transition_of WF')) + with self.mh() as (cnx, mh): + mh.cmd_add_entity_type('Folder2') + wf = mh.cmd_add_workflow(u'folder2 wf', 'Folder2', + ensure_workflowable=False) + todo = wf.add_state(u'todo', initial=True) + done = wf.add_state(u'done') + wf.add_transition(u'redoit', done, todo) + wf.add_transition(u'markasdone', todo, done) + cnx.commit() + eschema = self.schema.eschema('Folder2') + mh.cmd_drop_entity_type('Folder2') + self.assertNotIn('Folder2', self.schema) + self.assertFalse(cnx.execute('CWEType X WHERE X name "Folder2"')) + # test automatic workflow deletion + self.assertFalse(cnx.execute('Workflow X WHERE NOT X workflow_of ET')) + self.assertFalse(cnx.execute('State X WHERE NOT X state_of WF')) + self.assertFalse(cnx.execute('Transition X WHERE NOT X transition_of WF')) def test_rename_entity_type(self): - entity = self.mh.create_entity('Old', name=u'old') - self.repo.type_and_source_from_eid(entity.eid) - self.mh.cmd_rename_entity_type('Old', 'New') - self.mh.cmd_rename_attribute('New', 'name', 'new_name') + with self.mh() as (cnx, mh): + entity = mh.create_entity('Old', name=u'old') + self.repo.type_and_source_from_eid(entity.eid, entity._cw) + mh.cmd_rename_entity_type('Old', 'New') + mh.cmd_rename_attribute('New', 'name', 'new_name') def test_add_drop_relation_type(self): - self.mh.cmd_add_entity_type('Folder2', auto=False) - self.mh.cmd_add_relation_type('filed_under2') - self.assertTrue('filed_under2' in self.schema) - # Old will be missing as it has been renamed into 'New' in the migrated - # schema while New hasn't been added here. - self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), - sorted(str(e) for e in self.schema.entities() - if not e.final and e != 'Old')) - self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',)) - self.mh.cmd_drop_relation_type('filed_under2') - self.assertFalse('filed_under2' in self.schema) + with self.mh() as (cnx, mh): + mh.cmd_add_entity_type('Folder2', auto=False) + mh.cmd_add_relation_type('filed_under2') + self.assertIn('filed_under2', self.schema) + # Old will be missing as it has been renamed into 'New' in the migrated + # schema while New hasn't been added here. 
+ self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), + sorted(str(e) for e in self.schema.entities() + if not e.final and e != 'Old')) + self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',)) + mh.cmd_drop_relation_type('filed_under2') + self.assertNotIn('filed_under2', self.schema) def test_add_relation_definition_nortype(self): - self.mh.cmd_add_relation_definition('Personne', 'concerne2', 'Affaire') - self.assertEqual(self.schema['concerne2'].subjects(), - ('Personne',)) - self.assertEqual(self.schema['concerne2'].objects(), - ('Affaire', )) - self.assertEqual(self.schema['concerne2'].rdef('Personne', 'Affaire').cardinality, - '1*') - self.mh.cmd_add_relation_definition('Personne', 'concerne2', 'Note') - self.assertEqual(sorted(self.schema['concerne2'].objects()), ['Affaire', 'Note']) - self.mh.create_entity('Personne', nom=u'tot') - self.mh.create_entity('Affaire') - self.mh.rqlexec('SET X concerne2 Y WHERE X is Personne, Y is Affaire') - self.session.commit(free_cnxset=False) - self.mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Affaire') - self.assertTrue('concerne2' in self.schema) - self.mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Note') - self.assertFalse('concerne2' in self.schema) + with self.mh() as (cnx, mh): + mh.cmd_add_relation_definition('Personne', 'concerne2', 'Affaire') + self.assertEqual(self.schema['concerne2'].subjects(), + ('Personne',)) + self.assertEqual(self.schema['concerne2'].objects(), + ('Affaire', )) + self.assertEqual(self.schema['concerne2'].rdef('Personne', 'Affaire').cardinality, + '1*') + mh.cmd_add_relation_definition('Personne', 'concerne2', 'Note') + self.assertEqual(sorted(self.schema['concerne2'].objects()), ['Affaire', 'Note']) + mh.create_entity('Personne', nom=u'tot') + mh.create_entity('Affaire') + mh.rqlexec('SET X concerne2 Y WHERE X is Personne, Y is Affaire') + cnx.commit() + mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Affaire') + self.assertIn('concerne2', self.schema) + mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Note') + self.assertNotIn('concerne2', self.schema) def test_drop_relation_definition_existant_rtype(self): - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire', 'Personne']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) - self.mh.cmd_drop_relation_definition('Personne', 'concerne', 'Affaire') - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Division', 'Note', 'Societe', 'SubDivision']) - self.mh.cmd_add_relation_definition('Personne', 'concerne', 'Affaire') - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire', 'Personne']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) - # trick: overwrite self.maxeid to avoid deletion of just reintroduced types - self.maxeid = self.session.execute('Any MAX(X)')[0][0] + with self.mh() as (cnx, mh): + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire', 'Personne']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) + mh.cmd_drop_relation_definition('Personne', 'concerne', 'Affaire') + 
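For reference, `cmd_add_relation_definition` reads the definition from the migrated schema; the `concerne2` relation exercised above is presumably declared along these lines in the test's schema.py (hypothetical excerpt, only the names and the `1*` cardinality are taken from the assertions above)::

    from yams.buildobjs import RelationDefinition

    class concerne2(RelationDefinition):
        subject = 'Personne'
        object = 'Affaire'
        # '1*': each Personne relates to exactly one Affaire, an Affaire
        # may be the object of any number of concerne2 relations
        cardinality = '1*'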
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Division', 'Note', 'Societe', 'SubDivision']) + mh.cmd_add_relation_definition('Personne', 'concerne', 'Affaire') + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire', 'Personne']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) + # trick: overwrite self.maxeid to avoid deletion of just reintroduced types + self.maxeid = cnx.execute('Any MAX(X)')[0][0] def test_drop_relation_definition_with_specialization(self): - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire', 'Personne']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) - self.mh.cmd_drop_relation_definition('Affaire', 'concerne', 'Societe') - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire', 'Personne']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Affaire', 'Note']) - self.mh.cmd_add_relation_definition('Affaire', 'concerne', 'Societe') - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), - ['Affaire', 'Personne']) - self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), - ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) - # trick: overwrite self.maxeid to avoid deletion of just reintroduced types - self.maxeid = self.session.execute('Any MAX(X)')[0][0] + with self.mh() as (cnx, mh): + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire', 'Personne']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) + mh.cmd_drop_relation_definition('Affaire', 'concerne', 'Societe') + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire', 'Personne']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Affaire', 'Note']) + mh.cmd_add_relation_definition('Affaire', 'concerne', 'Societe') + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), + ['Affaire', 'Personne']) + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), + ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) + # trick: overwrite self.maxeid to avoid deletion of just reintroduced types + self.maxeid = cnx.execute('Any MAX(X)')[0][0] def test_rename_relation(self): self.skipTest('implement me') def test_change_relation_props_non_final(self): - rschema = self.schema['concerne'] - card = rschema.rdef('Affaire', 'Societe').cardinality - self.assertEqual(card, '**') - try: - self.mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', - cardinality='?*') + with self.mh() as (cnx, mh): + rschema = self.schema['concerne'] card = rschema.rdef('Affaire', 'Societe').cardinality - self.assertEqual(card, '?*') - finally: - self.mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', - cardinality='**') + self.assertEqual(card, '**') + try: + mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', + cardinality='?*') + card = rschema.rdef('Affaire', 'Societe').cardinality + self.assertEqual(card, '?*') + finally: + mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', 
+ cardinality='**') def test_change_relation_props_final(self): - rschema = self.schema['adel'] - card = rschema.rdef('Personne', 'String').fulltextindexed - self.assertEqual(card, False) - try: - self.mh.cmd_change_relation_props('Personne', 'adel', 'String', - fulltextindexed=True) + with self.mh() as (cnx, mh): + rschema = self.schema['adel'] card = rschema.rdef('Personne', 'String').fulltextindexed - self.assertEqual(card, True) - finally: - self.mh.cmd_change_relation_props('Personne', 'adel', 'String', - fulltextindexed=False) + self.assertEqual(card, False) + try: + mh.cmd_change_relation_props('Personne', 'adel', 'String', + fulltextindexed=True) + card = rschema.rdef('Personne', 'String').fulltextindexed + self.assertEqual(card, True) + finally: + mh.cmd_change_relation_props('Personne', 'adel', 'String', + fulltextindexed=False) def test_sync_schema_props_perms_rqlconstraints(self): - # Drop one of the RQLConstraint. - rdef = self.schema['evaluee'].rdefs[('Personne', 'Note')] - oldconstraints = rdef.constraints - self.assertIn('S created_by U', - [cstr.expression for cstr in oldconstraints]) - self.mh.cmd_sync_schema_props_perms('evaluee', commit=True) - newconstraints = rdef.constraints - self.assertNotIn('S created_by U', - [cstr.expression for cstr in newconstraints]) + with self.mh() as (cnx, mh): + # Drop one of the RQLConstraint. + rdef = self.schema['evaluee'].rdefs[('Personne', 'Note')] + oldconstraints = rdef.constraints + self.assertIn('S created_by U', + [cstr.expression for cstr in oldconstraints]) + mh.cmd_sync_schema_props_perms('evaluee', commit=True) + newconstraints = rdef.constraints + self.assertNotIn('S created_by U', + [cstr.expression for cstr in newconstraints]) - # Drop all RQLConstraint. - rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')] - oldconstraints = rdef.constraints - self.assertEqual(len(oldconstraints), 2) - self.mh.cmd_sync_schema_props_perms('travaille', commit=True) - rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')] - newconstraints = rdef.constraints - self.assertEqual(len(newconstraints), 0) + # Drop all RQLConstraint. 
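The constraints synchronized by `cmd_sync_schema_props_perms` below are declared on relation definitions in the cube's schema; a sketch of what the pre-migration `evaluee` declaration presumably contained, with only the `'S created_by U'` expression taken from the assertions that follow (hypothetical otherwise)::

    from yams.buildobjs import SubjectRelation
    from cubicweb.schema import RQLConstraint

    # inside the Personne entity type definition: the subject S of an
    # evaluee relation must have been created by the current user U
    evaluee = SubjectRelation('Note',
                              constraints=[RQLConstraint('S created_by U')])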
+ rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')] + oldconstraints = rdef.constraints + self.assertEqual(len(oldconstraints), 2) + mh.cmd_sync_schema_props_perms('travaille', commit=True) + rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')] + newconstraints = rdef.constraints + self.assertEqual(len(newconstraints), 0) @tag('longrun') def test_sync_schema_props_perms(self): - cursor = self.mh.session - cursor.set_cnxset() - nbrqlexpr_start = cursor.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0] - migrschema['titre'].rdefs[('Personne', 'String')].order = 7 - migrschema['adel'].rdefs[('Personne', 'String')].order = 6 - migrschema['ass'].rdefs[('Personne', 'String')].order = 5 - migrschema['Personne'].description = 'blabla bla' - migrschema['titre'].description = 'usually a title' - migrschema['titre'].rdefs[('Personne', 'String')].description = 'title for this person' - delete_concerne_rqlexpr = self._rrqlexpr_rset('delete', 'concerne') - add_concerne_rqlexpr = self._rrqlexpr_rset('add', 'concerne') + with self.mh() as (cnx, mh): + nbrqlexpr_start = cnx.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0] + migrschema['titre'].rdefs[('Personne', 'String')].order = 7 + migrschema['adel'].rdefs[('Personne', 'String')].order = 6 + migrschema['ass'].rdefs[('Personne', 'String')].order = 5 + migrschema['Personne'].description = 'blabla bla' + migrschema['titre'].description = 'usually a title' + migrschema['titre'].rdefs[('Personne', 'String')].description = 'title for this person' + delete_concerne_rqlexpr = self._rrqlexpr_rset(cnx, 'delete', 'concerne') + add_concerne_rqlexpr = self._rrqlexpr_rset(cnx, 'add', 'concerne') - self.mh.cmd_sync_schema_props_perms(commit=False) + mh.cmd_sync_schema_props_perms(commit=False) - self.assertEqual(cursor.execute('Any D WHERE X name "Personne", X description D')[0][0], - 'blabla bla') - self.assertEqual(cursor.execute('Any D WHERE X name "titre", X description D')[0][0], - 'usually a title') - self.assertEqual(cursor.execute('Any D WHERE X relation_type RT, RT name "titre",' + self.assertEqual(cnx.execute('Any D WHERE X name "Personne", X description D')[0][0], + 'blabla bla') + self.assertEqual(cnx.execute('Any D WHERE X name "titre", X description D')[0][0], + 'usually a title') + self.assertEqual(cnx.execute('Any D WHERE X relation_type RT, RT name "titre",' 'X from_entity FE, FE name "Personne",' 'X description D')[0][0], - 'title for this person') - rinorder = [n for n, in cursor.execute( - 'Any N ORDERBY O,N WHERE X is CWAttribute, X relation_type RT, RT name N,' - 'X from_entity FE, FE name "Personne",' - 'X ordernum O')] - expected = [u'nom', u'prenom', u'sexe', u'promo', u'ass', u'adel', u'titre', - u'web', u'tel', u'fax', u'datenaiss', u'test', u'tzdatenaiss', - u'description', u'firstname', - u'creation_date', u'cwuri', u'modification_date'] - self.assertEqual(expected, rinorder) + 'title for this person') + rinorder = [n for n, in cnx.execute( + 'Any N ORDERBY O,N WHERE X is CWAttribute, X relation_type RT, RT name N,' + 'X from_entity FE, FE name "Personne",' + 'X ordernum O')] + expected = [u'nom', u'prenom', u'sexe', u'promo', u'ass', u'adel', u'titre', + u'web', u'tel', u'fax', u'datenaiss', u'test', u'tzdatenaiss', + u'description', u'firstname', + u'creation_date', u'cwuri', u'modification_date'] + self.assertEqual(expected, rinorder) - # test permissions synchronization #################################### - # new rql expr to add note entity - eexpr = self._erqlexpr_entity('add', 'Note') - 
self.assertEqual(eexpr.expression, - 'X ecrit_part PE, U in_group G, ' - 'PE require_permission P, P name "add_note", P require_group G') - self.assertEqual([et.name for et in eexpr.reverse_add_permission], ['Note']) - self.assertEqual(eexpr.reverse_read_permission, ()) - self.assertEqual(eexpr.reverse_delete_permission, ()) - self.assertEqual(eexpr.reverse_update_permission, ()) - self.assertTrue(self._rrqlexpr_rset('add', 'para')) - # no rqlexpr to delete para attribute - self.assertFalse(self._rrqlexpr_rset('delete', 'para')) - # new rql expr to add ecrit_par relation - rexpr = self._rrqlexpr_entity('add', 'ecrit_par') - self.assertEqual(rexpr.expression, - 'O require_permission P, P name "add_note", ' - 'U in_group G, P require_group G') - self.assertEqual([rdef.rtype.name for rdef in rexpr.reverse_add_permission], ['ecrit_par']) - self.assertEqual(rexpr.reverse_read_permission, ()) - self.assertEqual(rexpr.reverse_delete_permission, ()) - # no more rqlexpr to delete and add travaille relation - self.assertFalse(self._rrqlexpr_rset('add', 'travaille')) - self.assertFalse(self._rrqlexpr_rset('delete', 'travaille')) - # no more rqlexpr to delete and update Societe entity - self.assertFalse(self._erqlexpr_rset('update', 'Societe')) - self.assertFalse(self._erqlexpr_rset('delete', 'Societe')) - # no more rqlexpr to read Affaire entity - self.assertFalse(self._erqlexpr_rset('read', 'Affaire')) - # rqlexpr to update Affaire entity has been updated - eexpr = self._erqlexpr_entity('update', 'Affaire') - self.assertEqual(eexpr.expression, 'X concerne S, S owned_by U') - # no change for rqlexpr to add and delete Affaire entity - self.assertEqual(len(self._erqlexpr_rset('delete', 'Affaire')), 1) - self.assertEqual(len(self._erqlexpr_rset('add', 'Affaire')), 1) - # no change for rqlexpr to add and delete concerne relation - self.assertEqual(len(self._rrqlexpr_rset('delete', 'concerne')), len(delete_concerne_rqlexpr)) - self.assertEqual(len(self._rrqlexpr_rset('add', 'concerne')), len(add_concerne_rqlexpr)) - # * migrschema involve: - # * 7 erqlexprs deletions (2 in (Affaire + Societe + Note.para) + 1 Note.something - # * 2 rrqlexprs deletions (travaille) - # * 1 update (Affaire update) - # * 2 new (Note add, ecrit_par add) - # * 2 implicit new for attributes (Note.para, Person.test) - # remaining orphan rql expr which should be deleted at commit (composite relation) - # unattached expressions -> pending deletion on commit - self.assertEqual(cursor.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "ERQLExpression",' - 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' - 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], - 7) - self.assertEqual(cursor.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "RRQLExpression",' - 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' - 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], - 2) - # finally - self.assertEqual(cursor.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0], - nbrqlexpr_start + 1 + 2 + 2 + 2) - self.mh.commit() - # unique_together test - self.assertEqual(len(self.schema.eschema('Personne')._unique_together), 1) - self.assertCountEqual(self.schema.eschema('Personne')._unique_together[0], - ('nom', 'prenom', 'datenaiss')) - rset = cursor.execute('Any C WHERE C is CWUniqueTogetherConstraint, C constraint_of ET, ET name "Personne"') - self.assertEqual(len(rset), 1) - relations = [r.name for r in rset.get_entity(0, 0).relations] - self.assertCountEqual(relations, ('nom', 'prenom', 
'datenaiss')) + # test permissions synchronization #################################### + # new rql expr to add note entity + eexpr = self._erqlexpr_entity(cnx, 'add', 'Note') + self.assertEqual(eexpr.expression, + 'X ecrit_part PE, U in_group G, ' + 'PE require_permission P, P name "add_note", P require_group G') + self.assertEqual([et.name for et in eexpr.reverse_add_permission], ['Note']) + self.assertEqual(eexpr.reverse_read_permission, ()) + self.assertEqual(eexpr.reverse_delete_permission, ()) + self.assertEqual(eexpr.reverse_update_permission, ()) + self.assertTrue(self._rrqlexpr_rset(cnx, 'add', 'para')) + # no rqlexpr to delete para attribute + self.assertFalse(self._rrqlexpr_rset(cnx, 'delete', 'para')) + # new rql expr to add ecrit_par relation + rexpr = self._rrqlexpr_entity(cnx, 'add', 'ecrit_par') + self.assertEqual(rexpr.expression, + 'O require_permission P, P name "add_note", ' + 'U in_group G, P require_group G') + self.assertEqual([rdef.rtype.name for rdef in rexpr.reverse_add_permission], ['ecrit_par']) + self.assertEqual(rexpr.reverse_read_permission, ()) + self.assertEqual(rexpr.reverse_delete_permission, ()) + # no more rqlexpr to delete and add travaille relation + self.assertFalse(self._rrqlexpr_rset(cnx, 'add', 'travaille')) + self.assertFalse(self._rrqlexpr_rset(cnx, 'delete', 'travaille')) + # no more rqlexpr to delete and update Societe entity + self.assertFalse(self._erqlexpr_rset(cnx, 'update', 'Societe')) + self.assertFalse(self._erqlexpr_rset(cnx, 'delete', 'Societe')) + # no more rqlexpr to read Affaire entity + self.assertFalse(self._erqlexpr_rset(cnx, 'read', 'Affaire')) + # rqlexpr to update Affaire entity has been updated + eexpr = self._erqlexpr_entity(cnx, 'update', 'Affaire') + self.assertEqual(eexpr.expression, 'X concerne S, S owned_by U') + # no change for rqlexpr to add and delete Affaire entity + self.assertEqual(len(self._erqlexpr_rset(cnx, 'delete', 'Affaire')), 1) + self.assertEqual(len(self._erqlexpr_rset(cnx, 'add', 'Affaire')), 1) + # no change for rqlexpr to add and delete concerne relation + self.assertEqual(len(self._rrqlexpr_rset(cnx, 'delete', 'concerne')), + len(delete_concerne_rqlexpr)) + self.assertEqual(len(self._rrqlexpr_rset(cnx, 'add', 'concerne')), + len(add_concerne_rqlexpr)) + # * migrschema involve: + # * 7 erqlexprs deletions (2 in (Affaire + Societe + Note.para) + 1 Note.something + # * 2 rrqlexprs deletions (travaille) + # * 1 update (Affaire update) + # * 2 new (Note add, ecrit_par add) + # * 2 implicit new for attributes (Note.para, Person.test) + # remaining orphan rql expr which should be deleted at commit (composite relation) + # unattached expressions -> pending deletion on commit + self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "ERQLExpression",' + 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' + 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], + 7) + self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "RRQLExpression",' + 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' + 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], + 2) + # finally + self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0], + nbrqlexpr_start + 1 + 2 + 2 + 2) + cnx.commit() + # unique_together test + self.assertEqual(len(self.schema.eschema('Personne')._unique_together), 1) + self.assertCountEqual(self.schema.eschema('Personne')._unique_together[0], + ('nom', 'prenom', 'datenaiss')) + rset = cnx.execute('Any C 
WHERE C is CWUniqueTogetherConstraint, C constraint_of ET, ET name "Personne"')
+ self.assertEqual(len(rset), 1)
+ relations = [r.name for r in rset.get_entity(0, 0).relations]
+ self.assertCountEqual(relations, ('nom', 'prenom', 'datenaiss'))
- def _erqlexpr_rset(self, action, ertype):
+ def _erqlexpr_rset(self, cnx, action, ertype):
 rql = 'RQLExpression X WHERE ET is CWEType, ET %s_permission X, ET name %%(name)s' % action
- return self.mh.session.execute(rql, {'name': ertype})
- def _erqlexpr_entity(self, action, ertype):
- rset = self._erqlexpr_rset(action, ertype)
+ return cnx.execute(rql, {'name': ertype})
+
+ def _erqlexpr_entity(self, cnx, action, ertype):
+ rset = self._erqlexpr_rset(cnx, action, ertype)
 self.assertEqual(len(rset), 1)
 return rset.get_entity(0, 0)
- def _rrqlexpr_rset(self, action, ertype):
+
+ def _rrqlexpr_rset(self, cnx, action, ertype):
 rql = 'RQLExpression X WHERE RT is CWRType, RDEF %s_permission X, RT name %%(name)s, RDEF relation_type RT' % action
- return self.mh.session.execute(rql, {'name': ertype})
- def _rrqlexpr_entity(self, action, ertype):
- rset = self._rrqlexpr_rset(action, ertype)
+ return cnx.execute(rql, {'name': ertype})
+
+ def _rrqlexpr_entity(self, cnx, action, ertype):
+ rset = self._rrqlexpr_rset(cnx, action, ertype)
 self.assertEqual(len(rset), 1)
 return rset.get_entity(0, 0)
 def test_set_size_constraint(self):
- # existing previous value
- try:
- self.mh.cmd_set_size_constraint('CWEType', 'name', 128)
- finally:
- self.mh.cmd_set_size_constraint('CWEType', 'name', 64)
- # non existing previous value
- try:
- self.mh.cmd_set_size_constraint('CWEType', 'description', 256)
- finally:
- self.mh.cmd_set_size_constraint('CWEType', 'description', None)
+ with self.mh() as (cnx, mh):
+ # existing previous value
+ try:
+ mh.cmd_set_size_constraint('CWEType', 'name', 128)
+ finally:
+ mh.cmd_set_size_constraint('CWEType', 'name', 64)
+ # non existing previous value
+ try:
+ mh.cmd_set_size_constraint('CWEType', 'description', 256)
+ finally:
+ mh.cmd_set_size_constraint('CWEType', 'description', None)
 @tag('longrun')
 def test_add_remove_cube_and_deps(self):
- cubes = set(self.config.cubes())
- schema = self.repo.schema
- self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs.iterkeys()),
- sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'),
- ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'),
- ('Note', 'Note'), ('Note', 'Bookmark')]))
- try:
+ with self.mh() as (cnx, mh):
+ schema = self.repo.schema
+ self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs.iterkeys()),
+ sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'),
+ ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'),
+ ('Note', 'Note'), ('Note', 'Bookmark')]))
 try:
- self.mh.cmd_remove_cube('email', removedeps=True)
+ mh.cmd_remove_cube('email', removedeps=True)
 # file was there because it's an email dependency, should have been removed
- self.assertFalse('email' in self.config.cubes())
- self.assertFalse(self.config.cube_dir('email') in self.config.cubes_path())
- self.assertFalse('file' in self.config.cubes())
- self.assertFalse(self.config.cube_dir('file') in self.config.cubes_path())
+ self.assertNotIn('email', self.config.cubes())
+ self.assertNotIn(self.config.cube_dir('email'), self.config.cubes_path())
+ self.assertNotIn('file', self.config.cubes())
+ self.assertNotIn(self.config.cube_dir('file'), self.config.cubes_path())
 for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'sender', 'in_thread', 'reply_to',
'data_format'): self.assertFalse(ertype in schema, ertype) @@ -539,121 +556,116 @@ ('Note', 'Bookmark')])) self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'Folder', 'Note']) self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'Folder', 'Note']) - self.assertEqual(self.session.execute('Any X WHERE X pkey "system.version.email"').rowcount, 0) - self.assertEqual(self.session.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0) - except : - import traceback - traceback.print_exc() - raise - finally: - self.mh.cmd_add_cube('email') - self.assertTrue('email' in self.config.cubes()) - self.assertTrue(self.config.cube_dir('email') in self.config.cubes_path()) - self.assertTrue('file' in self.config.cubes()) - self.assertTrue(self.config.cube_dir('file') in self.config.cubes_path()) - for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', - 'sender', 'in_thread', 'reply_to', 'data_format'): - self.assertTrue(ertype in schema, ertype) - self.assertEqual(sorted(schema['see_also'].rdefs.iterkeys()), - sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), - ('Bookmark', 'Bookmark'), - ('Bookmark', 'Note'), - ('Note', 'Note'), - ('Note', 'Bookmark')])) - self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) - self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) - from cubes.email.__pkginfo__ import version as email_version - from cubes.file.__pkginfo__ import version as file_version - self.assertEqual(self.session.execute('Any V WHERE X value V, X pkey "system.version.email"')[0][0], - email_version) - self.assertEqual(self.session.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0], - file_version) - # trick: overwrite self.maxeid to avoid deletion of just reintroduced - # types (and their associated tables!) 
- self.maxeid = self.session.execute('Any MAX(X)')[0][0] - # why this commit is necessary is unclear to me (though without it - # next test may fail complaining of missing tables - self.session.commit(free_cnxset=False) + self.assertEqual(cnx.execute('Any X WHERE X pkey "system.version.email"').rowcount, 0) + self.assertEqual(cnx.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0) + finally: + mh.cmd_add_cube('email') + self.assertIn('email', self.config.cubes()) + self.assertIn(self.config.cube_dir('email'), self.config.cubes_path()) + self.assertIn('file', self.config.cubes()) + self.assertIn(self.config.cube_dir('file'), self.config.cubes_path()) + for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', + 'sender', 'in_thread', 'reply_to', 'data_format'): + self.assertTrue(ertype in schema, ertype) + self.assertEqual(sorted(schema['see_also'].rdefs.iterkeys()), + sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), + ('Bookmark', 'Bookmark'), + ('Bookmark', 'Note'), + ('Note', 'Note'), + ('Note', 'Bookmark')])) + self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) + self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) + from cubes.email.__pkginfo__ import version as email_version + from cubes.file.__pkginfo__ import version as file_version + self.assertEqual(cnx.execute('Any V WHERE X value V, X pkey "system.version.email"')[0][0], + email_version) + self.assertEqual(cnx.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0], + file_version) + # trick: overwrite self.maxeid to avoid deletion of just reintroduced + # types (and their associated tables!) + self.maxeid = cnx.execute('Any MAX(X)')[0][0] + # why this commit is necessary is unclear to me (though without it + # next test may fail complaining of missing tables + cnx.commit() @tag('longrun') def test_add_remove_cube_no_deps(self): - cubes = set(self.config.cubes()) - schema = self.repo.schema - try: + with self.mh() as (cnx, mh): + cubes = set(self.config.cubes()) + schema = self.repo.schema try: - self.mh.cmd_remove_cube('email') + mh.cmd_remove_cube('email') cubes.remove('email') - self.assertFalse('email' in self.config.cubes()) - self.assertTrue('file' in self.config.cubes()) + self.assertNotIn('email', self.config.cubes()) + self.assertIn('file', self.config.cubes()) for ertype in ('Email', 'EmailThread', 'EmailPart', 'sender', 'in_thread', 'reply_to'): self.assertFalse(ertype in schema, ertype) - except : - import traceback - traceback.print_exc() - raise - finally: - self.mh.cmd_add_cube('email') - self.assertTrue('email' in self.config.cubes()) - # trick: overwrite self.maxeid to avoid deletion of just reintroduced - # types (and their associated tables!) - self.maxeid = self.session.execute('Any MAX(X)')[0][0] - # why this commit is necessary is unclear to me (though without it - # next test may fail complaining of missing tables - self.session.commit(free_cnxset=False) + finally: + mh.cmd_add_cube('email') + self.assertIn('email', self.config.cubes()) + # trick: overwrite self.maxeid to avoid deletion of just reintroduced + # types (and their associated tables!) 
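The `system.version.<cube>` pkeys checked above are ordinary CWProperty rows in which the instance records each installed cube's version; the same lookup in parameterized form, as a hedged illustration::

    # hypothetical: fetch the recorded version of the 'email' cube, if any
    rset = cnx.execute('Any V WHERE X value V, X pkey %(k)s',
                       {'k': u'system.version.email'})
    email_version = rset[0][0] if rset else None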
+ self.maxeid = cnx.execute('Any MAX(X)')[0][0] # XXXXXXX KILL KENNY + # why this commit is necessary is unclear to me (though without it + # next test may fail complaining of missing tables + cnx.commit() def test_remove_dep_cube(self): - with self.assertRaises(ConfigurationError) as cm: - self.mh.cmd_remove_cube('file') - self.assertEqual(str(cm.exception), "can't remove cube file, used as a dependency") + with self.mh() as (cnx, mh): + with self.assertRaises(ConfigurationError) as cm: + mh.cmd_remove_cube('file') + self.assertEqual(str(cm.exception), "can't remove cube file, used as a dependency") @tag('longrun') def test_introduce_base_class(self): - self.mh.cmd_add_entity_type('Para') - self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), - ['Note']) - self.assertEqual(self.schema['Note'].specializes().type, 'Para') - self.mh.cmd_add_entity_type('Text') - self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), - ['Note', 'Text']) - self.assertEqual(self.schema['Text'].specializes().type, 'Para') - # test columns have been actually added - text = self.session.execute('INSERT Text X: X para "hip", X summary "hop", X newattr "momo"').get_entity(0, 0) - note = self.session.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo", X unique_id "x"').get_entity(0, 0) - aff = self.session.execute('INSERT Affaire X').get_entity(0, 0) - self.assertTrue(self.session.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': text.eid, 'y': aff.eid})) - self.assertTrue(self.session.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': note.eid, 'y': aff.eid})) - self.assertTrue(self.session.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': text.eid, 'y': aff.eid})) - self.assertTrue(self.session.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': note.eid, 'y': aff.eid})) - # XXX remove specializes by ourselves, else tearDown fails when removing - # Para because of Note inheritance. This could be fixed by putting the - # MemSchemaCWETypeDel(session, name) operation in the - # after_delete_entity(CWEType) hook, since in that case the MemSchemaSpecializesDel - # operation would be removed before, but I'm not sure this is a desired behaviour. - # - # also we need more tests about introducing/removing base classes or - # specialization relationship... 
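Schema specialization, as exercised by `test_introduce_base_class` here, is declared in yams by subclassing the parent entity type; a minimal sketch assuming the test schema's `Para`/`Note` names (the real declarations carry more attributes)::

    from yams.buildobjs import EntityType, String

    class Para(EntityType):
        para = String()

    class Note(Para):
        # subclassing plus this flag creates the 'specializes' relation
        # between Note and Para in the instance schema
        __specializes_schema__ = True
        shortpara = String()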
- self.session.execute('DELETE X specializes Y WHERE Y name "Para"') - self.session.commit(free_cnxset=False) - self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), - []) - self.assertEqual(self.schema['Note'].specializes(), None) - self.assertEqual(self.schema['Text'].specializes(), None) + with self.mh() as (cnx, mh): + mh.cmd_add_entity_type('Para') + self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), + ['Note']) + self.assertEqual(self.schema['Note'].specializes().type, 'Para') + mh.cmd_add_entity_type('Text') + self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), + ['Note', 'Text']) + self.assertEqual(self.schema['Text'].specializes().type, 'Para') + # test columns have been actually added + text = cnx.execute('INSERT Text X: X para "hip", X summary "hop", X newattr "momo"').get_entity(0, 0) + note = cnx.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo", X unique_id "x"').get_entity(0, 0) + aff = cnx.execute('INSERT Affaire X').get_entity(0, 0) + self.assertTrue(cnx.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', + {'x': text.eid, 'y': aff.eid})) + self.assertTrue(cnx.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', + {'x': note.eid, 'y': aff.eid})) + self.assertTrue(cnx.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', + {'x': text.eid, 'y': aff.eid})) + self.assertTrue(cnx.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', + {'x': note.eid, 'y': aff.eid})) + # XXX remove specializes by ourselves, else tearDown fails when removing + # Para because of Note inheritance. This could be fixed by putting the + # MemSchemaCWETypeDel(session, name) operation in the + # after_delete_entity(CWEType) hook, since in that case the MemSchemaSpecializesDel + # operation would be removed before, but I'm not sure this is a desired behaviour. + # + # also we need more tests about introducing/removing base classes or + # specialization relationship... + cnx.execute('DELETE X specializes Y WHERE Y name "Para"') + cnx.commit() + self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), + []) + self.assertEqual(self.schema['Note'].specializes(), None) + self.assertEqual(self.schema['Text'].specializes(), None) def test_add_symmetric_relation_type(self): - same_as_sql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' " - "and name='same_as_relation'") - self.assertFalse(same_as_sql) - self.mh.cmd_add_relation_type('same_as') - same_as_sql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' " - "and name='same_as_relation'") - self.assertTrue(same_as_sql) + with self.mh() as (cnx, mh): + same_as_sql = mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' " + "and name='same_as_relation'") + self.assertFalse(same_as_sql) + mh.cmd_add_relation_type('same_as') + same_as_sql = mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' " + "and name='same_as_relation'") + self.assertTrue(same_as_sql) if __name__ == '__main__': unittest_main() diff -r 84738d495ffd -r 793377697c81 server/test/unittest_msplanner.py --- a/server/test/unittest_msplanner.py Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2812 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""unit tests for module cubicweb.server.msplanner"""
-
-from logilab.common.decorators import clear_cache
-from yams.buildobjs import RelationDefinition
-from rql import BadRQLQuery
-
-from cubicweb.devtools import get_test_db_handler, TestServerConfiguration
-from cubicweb.devtools.repotest import BasePlannerTC, test_plan
-
-class _SetGenerator(object):
- """singleton to easily create set using "s[0]" or "s[0,1,2]" for instance
- """
- def __getitem__(self, key):
- try:
- it = iter(key)
- except TypeError:
- it = (key,)
- return set(it)
-s = _SetGenerator()
-
-from cubicweb.schema import ERQLExpression
-from cubicweb.server.sources import AbstractSource
-from cubicweb.server.msplanner import MSPlanner, PartPlanInformation
-
-class FakeUserROSource(AbstractSource):
- support_entities = {'CWUser': False}
- support_relations = {}
- def syntax_tree_search(self, *args, **kwargs):
- return []
-
-
-class FakeCardSource(AbstractSource):
- support_entities = {'Card': True, 'Note': True, 'State': True}
- support_relations = {'in_state': True, 'multisource_rel': True, 'multisource_inlined_rel': True,
- 'multisource_crossed_rel': True,}
- dont_cross_relations = set(('fiche', 'state_of'))
- cross_relations = set(('multisource_crossed_rel',))
-
- def syntax_tree_search(self, *args, **kwargs):
- return []
-
-
-class FakeDataFeedSource(FakeCardSource):
- copy_based_source = True
-
-X_ALL_SOLS = sorted([{'X': 'Affaire'}, {'X': 'BaseTransition'}, {'X': 'Basket'},
- {'X': 'Bookmark'}, {'X': 'CWAttribute'}, {'X': 'CWCache'},
- {'X': 'CWConstraint'}, {'X': 'CWConstraintType'}, {'X': 'CWDataImport'}, {'X': 'CWEType'},
- {'X': 'CWGroup'}, {'X': 'CWPermission'}, {'X': 'CWProperty'},
- {'X': 'CWRType'}, {'X': 'CWRelation'},
- {'X': 'CWSource'}, {'X': 'CWSourceHostConfig'}, {'X': 'CWSourceSchemaConfig'},
- {'X': 'CWUser'}, {'X': 'CWUniqueTogetherConstraint'},
- {'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'},
- {'X': 'Email'}, {'X': 'EmailAddress'}, {'X': 'EmailPart'},
- {'X': 'EmailThread'}, {'X': 'ExternalUri'}, {'X': 'File'},
- {'X': 'Folder'}, {'X': 'Frozable'}, {'X': 'Note'}, {'X': 'Old'},
- {'X': 'Personne'}, {'X': 'RQLExpression'}, {'X': 'Societe'},
- {'X': 'State'}, {'X': 'SubDivision'}, {'X': 'SubWorkflowExitPoint'},
- {'X': 'Tag'}, {'X': 'TrInfo'}, {'X': 'Transition'},
- {'X': 'Workflow'}, {'X': 'WorkflowTransition'}])
-
-
-# keep cnx so it's not garbage collected and the associated session is closed
-def setUpModule(*args):
- global repo, cnx
- handler = get_test_db_handler(TestServerConfiguration(apphome=BaseMSPlannerTC.datadir))
- handler.build_db_cache()
- repo, cnx = handler.get_repo_and_cnx()
-
-def tearDownModule(*args):
- global repo, cnx
- del repo, cnx
-
-
-class BaseMSPlannerTC(BasePlannerTC):
- """test planner related feature on a 3-sources repository:
-
- * system source supporting everything
- * ldap source supporting CWUser
- * rql source supporting Card
- """
-
- def
setUp(self): - self.__class__.repo = repo - #_QuerierTC.setUp(self) - self.setup() - # hijack Affaire security - affreadperms = list(self.schema['Affaire'].permissions['read']) - self.prevrqlexpr_affaire = affreadperms[-1] - # add access to type attribute so S can't be invariant - affreadperms[-1] = ERQLExpression('X concerne S?, S owned_by U, S type "X"') - self.schema['Affaire'].set_action_permissions('read', affreadperms) - # hijack CWUser security - userreadperms = list(self.schema['CWUser'].permissions['read']) - self.prevrqlexpr_user = userreadperms[-1] - userreadperms[-1] = ERQLExpression('X owned_by U') - self.schema['CWUser'].set_action_permissions('read', userreadperms) - self.add_source(FakeUserROSource, 'ldap') - self.add_source(FakeCardSource, 'cards') - self.add_source(FakeDataFeedSource, 'datafeed') - - def tearDown(self): - # restore hijacked security - self.restore_orig_affaire_security() - self.restore_orig_cwuser_security() - super(BaseMSPlannerTC, self).tearDown() - - def restore_orig_affaire_security(self): - affreadperms = list(self.schema['Affaire'].permissions['read']) - affreadperms[-1] = self.prevrqlexpr_affaire - self.schema['Affaire'].set_action_permissions('read', affreadperms) - - def restore_orig_cwuser_security(self): - if hasattr(self, '_orig_cwuser_security_restored'): - return - self._orig_cwuser_security_restored = True - userreadperms = list(self.schema['CWUser'].permissions['read']) - userreadperms[-1] = self.prevrqlexpr_user - self.schema['CWUser'].set_action_permissions('read', userreadperms) - - -class PartPlanInformationTC(BaseMSPlannerTC): - - def _test(self, rql, *args): - if len(args) == 3: - kwargs, sourcesterms, needsplit = args - else: - sourcesterms, needsplit = args - kwargs = None - plan = self._prepare_plan(rql, kwargs) - union = plan.rqlst - plan.preprocess(union) - ppi = PartPlanInformation(plan, union.children[0]) - for sourcevars in ppi._sourcesterms.itervalues(): - for var in list(sourcevars): - solindices = sourcevars.pop(var) - sourcevars[var._ms_table_key()] = solindices - self.assertEqual(ppi._sourcesterms, sourcesterms) - self.assertEqual(ppi.needsplit, needsplit) - - - def test_simple_system_only(self): - """retrieve entities only supported by the system source""" - self._test('CWGroup X', - {self.system: {'X': s[0]}}, False) - - def test_simple_system_ldap(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - self._test('CWUser X', - {self.system: {'X': s[0]}, self.ldap: {'X': s[0]}}, False) - - def test_simple_system_rql(self): - """retrieve Card X from both sources and return concatenation of results - """ - self._test('Any X, XT WHERE X is Card, X title XT', - {self.system: {'X': s[0]}, self.cards: {'X': s[0]}}, False) - - def test_simple_eid_specified(self): - """retrieve CWUser X from system source (eid is specified, can locate the entity) - """ - ueid = self.session.user.eid - self._test('Any X,L WHERE X eid %(x)s, X login L', {'x': ueid}, - {self.system: {'X': s[0]}}, False) - - def test_simple_eid_invariant(self): - """retrieve CWUser X from system source (eid is specified, can locate the entity) - """ - ueid = self.session.user.eid - self._test('Any X WHERE X eid %(x)s', {'x': ueid}, - {self.system: {'x': s[0]}}, False) - - def test_simple_invariant(self): - """retrieve CWUser X from system source only (X is invariant and in_group not supported by ldap source) - """ - self._test('Any X WHERE X is CWUser, X in_group G, G name "users"', - {self.system: {'X': s[0], 'G': s[0], 
'in_group': s[0]}}, False) - - def test_security_has_text(self): - """retrieve CWUser X from system source only (has_text not supported by ldap source) - """ - # specify CWUser instead of any since the way this test is written we aren't well dealing - # with ambigous query (eg only considering the first solution) - self._test('CWUser X WHERE X has_text "bla"', - {self.system: {'X': s[0]}}, False) - - def test_complex_base(self): - """ - 1. retrieve Any X, L WHERE X is CWUser, X login L from system and ldap sources, store - concatenation of results into a temporary table - 2. return the result of Any X, L WHERE X is TMP, X login L, X in_group G, - G name 'users' on the system source - """ - self._test('Any X,L WHERE X is CWUser, X in_group G, X login L, G name "users"', - {self.system: {'X': s[0], 'G': s[0], 'in_group': s[0]}, - self.ldap : {'X': s[0]}}, True) - - def test_complex_invariant_ordered(self): - """ - 1. retrieve Any X,AA WHERE X modification_date AA from system and ldap sources, store - concatenation of results into a temporary table - 2. return the result of Any X,AA ORDERBY AA WHERE %s owned_by X, X modification_date AA - on the system source - """ - ueid = self.session.user.eid - self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E owned_by X, X modification_date AA', {'x': ueid}, - {self.system: {'x': s[0], 'X': s[0], 'owned_by': s[0]}, - self.ldap : {'X': s[0]}}, True) - - def test_complex_invariant(self): - """ - 1. retrieve Any X,L,AA WHERE X login L, X modification_date AA from system and ldap sources, store - concatenation of results into a temporary table - 2. return the result of Any X,L,AA WHERE %s owned_by X, X login L, X modification_date AA - on the system source - """ - ueid = self.session.user.eid - self._test('Any X,L,AA WHERE E eid %(x)s, E owned_by X, X login L, X modification_date AA', {'x': ueid}, - {self.system: {'x': s[0], 'X': s[0], 'owned_by': s[0]}, - self.ldap : {'X': s[0]}}, True) - - def test_complex_ambigous(self): - """retrieve CWUser X from system and ldap sources, Person X from system source only - """ - self._test('Any X,F WHERE X firstname F', - {self.system: {'X': s[0, 1]}, - self.ldap: {'X': s[0]}}, True) - - def test_complex_multiple(self): - """ - 1. retrieve Any X,A,Y,B WHERE X login A, Y login B from system and ldap sources, store - cartesian product of results into a temporary table - 2. 
return the result of Any X,Y WHERE X login 'syt', Y login 'adim' - on the system source - """ - ueid = self.session.user.eid - self._test('Any X,Y WHERE X login "syt", Y login "adim"', {'x': ueid}, - {self.system: {'Y': s[0], 'X': s[0]}, - self.ldap: {'Y': s[0], 'X': s[0]}}, True) - - def test_complex_aggregat(self): - solindexes = set(range(len([e for e in self.schema.entities() if not e.final]))) - self._test('Any MAX(X)', - {self.system: {'X': solindexes}}, False) - - def test_complex_optional(self): - ueid = self.session.user.eid - self._test('Any U WHERE WF wf_info_for X, X eid %(x)s, WF owned_by U?, WF from_state FS', {'x': ueid}, - {self.system: {'WF': s[0], 'FS': s[0], 'U': s[0], - 'from_state': s[0], 'owned_by': s[0], 'wf_info_for': s[0], - 'x': s[0]}}, - False) - - def test_exists4(self): - """ - State S could come from both rql source and system source, - but since X cannot come from the rql source, the solution - {self.cards : 'S'} must be removed - """ - self._test('Any G,L WHERE X in_group G, X login L, G name "managers", ' - 'EXISTS(X copain T, T login L, T login in ("comme", "cochon")) OR ' - 'EXISTS(X in_state S, S name "pascontent", NOT X copain T2, T2 login "billy")', - {self.system: {'X': s[0], 'S': s[0], 'T2': s[0], 'T': s[0], 'G': s[0], 'copain': s[0], 'in_group': s[0]}, - self.ldap: {'X': s[0], 'T2': s[0], 'T': s[0]}}, - True) - - def test_relation_need_split(self): - self._test('Any X, S WHERE X in_state S', - {self.system: {'X': s[0, 1, 2], 'S': s[0, 1, 2]}, - self.cards: {'X': s[2], 'S': s[2]}}, - True) - - def test_not_relation_need_split(self): - self._test('Any SN WHERE NOT X in_state S, S name SN', - {self.cards: {'X': s[2], 'S': s[0, 1, 2]}, - self.system: {'X': s[0, 1, 2], 'S': s[0, 1, 2]}}, - True) - - def test_not_relation_no_split_external(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - # similar to the above test but with an eid coming from the external source. 
- # the same plan may be used, since we won't find any record in the system source - # linking 9999999 to a state - self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN', - {'x': 999999}, - {self.cards: {'x': s[0], 'S': s[0]}, - self.system: {'x': s[0], 'S': s[0]}}, - False) - - def test_relation_restriction_ambigous_need_split(self): - self._test('Any X,T WHERE X in_state S, S name "pending", T tags X', - {self.system: {'X': s[0, 1, 2], 'S': s[0, 1, 2], 'T': s[0, 1, 2], 'tags': s[0, 1, 2]}, - self.cards: {'X': s[2], 'S': s[2]}}, - True) - - def test_simplified_var(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - # need access to source since X table has to be accessed because of the outer join - self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s', - {'x': 999999, 'u': self.session.user.eid}, - {self.system: {'P': s[0], 'G': s[0], - 'require_permission': s[0], 'in_group': s[0], 'P': s[0], 'require_group': s[0], - 'u': s[0]}, - self.cards: {'X': s[0]}}, - True) - - def test_delete_relation1(self): - ueid = self.session.user.eid - self._test('Any X, Y WHERE X created_by Y, X eid %(x)s, NOT Y eid %(y)s', - {'x': ueid, 'y': ueid}, - {self.system: {'Y': s[0], 'created_by': s[0], 'x': s[0]}}, - False) - - def test_crossed_relation_eid_1_needattr(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - ueid = self.session.user.eid - self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T', - {'x': 999999,}, - {self.cards: {'Y': s[0]}, self.system: {'Y': s[0], 'x': s[0]}}, - True) - - def test_crossed_relation_eid_1_invariant(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y', - {'x': 999999}, - {self.system: {'Y': s[0], 'x': s[0]}}, - False) - - def test_crossed_relation_eid_2_invariant(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y', - {'x': 999999,}, - {self.cards: {'Y': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}, - self.system: {'Y': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}}, - False) - - def test_version_crossed_depends_on_1(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', - {'x': 999999}, - {self.cards: {'X': s[0], 'AD': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}, - self.system: {'X': s[0], 'AD': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}}, - True) - - def test_version_crossed_depends_on_2(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', - {'x': 999999}, - {self.cards: {'X': s[0], 'AD': s[0]}, - self.system: {'X': s[0], 'AD': s[0], 'x': s[0]}}, - True) - - def test_simplified_var_3(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'cards', 999998, 'cards') - self._test('Any S,T WHERE S eid %(s)s, N eid %(n)s, N type T, N is Note, S is State', - {'n': 999999, 's': 999998}, - {self.cards: {'s': s[0], 'N': s[0]}}, False) - - - -class MSPlannerTC(BaseMSPlannerTC): - - def setUp(self): - BaseMSPlannerTC.setUp(self) - self.planner = 
MSPlanner(self.o.schema, self.repo.vreg.rqlhelper) - for cached in ('rel_type_sources', 'can_cross_relation', 'is_multi_sources_relation'): - clear_cache(self.repo, cached) - - _test = test_plan - - def test_simple_system_only(self): - """retrieve entities only supported by the system source - """ - self._test('CWGroup X', - [('OneFetchStep', [('Any X WHERE X is CWGroup', [{'X': 'CWGroup'}])], - None, None, [self.system], {}, [])]) - - def test_simple_system_only_limit(self): - """retrieve entities only supported by the system source - """ - self._test('CWGroup X LIMIT 10', - [('OneFetchStep', [('Any X LIMIT 10 WHERE X is CWGroup', [{'X': 'CWGroup'}])], - 10, None, [self.system], {}, [])]) - - def test_simple_system_only_limit_offset(self): - """retrieve entities only supported by the system source - """ - self._test('CWGroup X LIMIT 10 OFFSET 10', - [('OneFetchStep', [('Any X LIMIT 10 OFFSET 10 WHERE X is CWGroup', [{'X': 'CWGroup'}])], - 10, 10, [self.system], {}, [])]) - - def test_simple_system_ldap(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - self._test('CWUser X', - [('OneFetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])], - None, None, [self.ldap, self.system], {}, [])]) - - def test_simple_system_ldap_limit(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - self._test('CWUser X LIMIT 10', - [('OneFetchStep', [('Any X LIMIT 10 WHERE X is CWUser', [{'X': 'CWUser'}])], - 10, None, [self.ldap, self.system], {}, [])]) - - def test_simple_system_ldap_limit_offset(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - self._test('CWUser X LIMIT 10 OFFSET 10', - [('OneFetchStep', [('Any X LIMIT 10 OFFSET 10 WHERE X is CWUser', [{'X': 'CWUser'}])], - 10, 10, [self.ldap, self.system], {}, [])]) - - def test_simple_system_ldap_ordered_limit_offset(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - self._test('CWUser X ORDERBY X LIMIT 10 OFFSET 10', - [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0\nLIMIT 10\nOFFSET 10', None, [ - ('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], {}, {'X': 'table0.C0'}, []), - ]), - ]) - def test_simple_system_ldap_aggregat(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - # COUNT(X) is kept in sub-step and transformed into SUM(X) in the AggrStep - self._test('Any COUNT(X) WHERE X is CWUser', - [('AggrStep', 'SELECT SUM(table0.C0) FROM table0', None, [ - ('FetchStep', [('Any COUNT(X) WHERE X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], {}, {'COUNT(X)': 'table0.C0'}, []), - ]), - ]) - - def test_simple_system_rql(self): - """retrieve Card X from both sources and return concatenation of results - """ - self._test('Any X, XT WHERE X is Card, X title XT', - [('OneFetchStep', [('Any X,XT WHERE X is Card, X title XT', [{'X': 'Card', 'XT': 'String'}])], - None, None, [self.cards, self.system], {}, [])]) - - def test_simple_eid_specified(self): - """retrieve CWUser X from system source (eid is specified, can locate the entity) - """ - ueid = self.session.user.eid - self._test('Any X,L WHERE X eid %(x)s, X login L', - [('OneFetchStep', [('Any X,L WHERE X eid %s, X login L'%ueid, [{'X': 'CWUser', 'L': 'String'}])], - None, None, [self.system], {}, [])], - {'x': ueid}) - - def test_simple_eid_invariant(self): - """retrieve CWUser X from system source (eid is specified, can locate the 
entity) - """ - ueid = self.session.user.eid - self._test('Any X WHERE X eid %(x)s', - [('OneFetchStep', [('Any %s'%ueid, [{}])], - None, None, [self.system], {}, [])], - {'x': ueid}) - - def test_simple_invariant(self): - """retrieve CWUser X from system source only (X is invariant and in_group not supported by ldap source) - """ - self._test('Any X WHERE X is CWUser, X in_group G, G name "users"', - [('OneFetchStep', [('Any X WHERE X is CWUser, X in_group G, G name "users"', - [{'X': 'CWUser', 'G': 'CWGroup'}])], - None, None, [self.system], {}, [])]) - - def test_complex_base(self): - """ - 1. retrieve Any X, L WHERE X is CWUser, X login L from system and ldap sources, store - concatenation of results into a temporary table - 2. return the result of Any X, L WHERE X is TMP, X login LX in_group G, - G name 'users' on the system source - """ - self._test('Any X,L WHERE X is CWUser, X in_group G, X login L, G name "users"', - [('FetchStep', [('Any X,L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, []), - ('OneFetchStep', [('Any X,L WHERE X in_group G, X login L, G name "users", G is CWGroup, X is CWUser', - [{'X': 'CWUser', 'L': 'String', 'G': 'CWGroup'}])], - None, None, [self.system], - {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, []) - ]) - - def test_complex_base_limit_offset(self): - """ - 1. retrieve Any X, L WHERE X is CWUser, X login L from system and ldap sources, store - concatenation of results into a temporary table - 2. return the result of Any X, L WHERE X is TMP, X login LX in_group G, - G name 'users' on the system source - """ - self._test('Any X,L LIMIT 10 OFFSET 10 WHERE X is CWUser, X in_group G, X login L, G name "users"', - [('FetchStep', [('Any X,L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, []), - ('OneFetchStep', [('Any X,L LIMIT 10 OFFSET 10 WHERE X in_group G, X login L, G name "users", G is CWGroup, X is CWUser', - [{'X': 'CWUser', 'L': 'String', 'G': 'CWGroup'}])], - 10, 10, - [self.system], {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, []) - ]) - - def test_complex_ordered(self): - self._test('Any L ORDERBY L WHERE X login L', - [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0', None, - [('FetchStep', [('Any L WHERE X login L, X is CWUser', - [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []), - ]) - ]) - - def test_complex_ordered_limit_offset(self): - self._test('Any L ORDERBY L LIMIT 10 OFFSET 10 WHERE X login L', - [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0\nLIMIT 10\nOFFSET 10', None, - [('FetchStep', [('Any L WHERE X login L, X is CWUser', - [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []), - ]) - ]) - - def test_complex_invariant_ordered(self): - """ - 1. retrieve Any X,AA WHERE X modification_date AA from system and ldap sources, store - concatenation of results into a temporary table - 2. 
return the result of Any X,AA ORDERBY AA WHERE %s owned_by X, X modification_date AA - on the system source - - note: this is what is expected because of the XXX, not the actual result (which is correct anyway) - """ - ueid = self.session.user.eid - self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E owned_by X, X modification_date AA', - [('FetchStep', - [('Any X,AA WHERE X modification_date AA, X is CWUser', - [{'AA': 'Datetime', 'X': 'CWUser'}])], - [self.ldap, self.system], None, - {'AA': 'table0.C1', 'X': 'table0.C0', 'X.modification_date': 'table0.C1'}, []), - ('OneFetchStep', - [('Any X,AA ORDERBY AA WHERE %s owned_by X, X modification_date AA, X is CWUser' % ueid, - [{'AA': 'Datetime', 'X': 'CWUser'}])], - None, None, [self.system], - {'AA': 'table0.C1', 'X': 'table0.C0', 'X.modification_date': 'table0.C1'}, []), - ], - {'x': ueid}) - - def test_complex_invariant(self): - """ - 1. retrieve Any X,L,AA WHERE X login L, X modification_date AA from system and ldap sources, store - concatenation of results into a temporary table - 2. return the result of Any X,L,AA WHERE %s owned_by X, X login L, X modification_date AA - on the system source - """ - ueid = self.session.user.eid - self._test('Any X,L,AA WHERE E eid %(x)s, E owned_by X, X login L, X modification_date AA', - [('FetchStep', [('Any X,L,AA WHERE X login L, X modification_date AA, X is CWUser', - [{'AA': 'Datetime', 'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'AA': 'table0.C2', 'X': 'table0.C0', 'X.login': 'table0.C1', 'X.modification_date': 'table0.C2', 'L': 'table0.C1'}, []), - ('OneFetchStep', [('Any X,L,AA WHERE %s owned_by X, X login L, X modification_date AA, X is CWUser'%ueid, - [{'AA': 'Datetime', 'X': 'CWUser', 'L': 'String'}])], - None, None, [self.system], - {'AA': 'table0.C2', 'X': 'table0.C0', 'X.login': 'table0.C1', 'X.modification_date': 'table0.C2', 'L': 'table0.C1'}, [])], - {'x': ueid}) - - def test_complex_ambigous(self): - """retrieve CWUser X from system and ldap sources, Personne X from system source only - """ - self._test('Any X,F WHERE X firstname F', - [('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,F WHERE X firstname F, X is CWUser', - [{'X': 'CWUser', 'F': 'String'}])], - None, None, [self.ldap, self.system], {}, []), - ('OneFetchStep', [('Any X,F WHERE X firstname F, X is Personne', - [{'X': 'Personne', 'F': 'String'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_complex_ambigous_limit_offset(self): - """retrieve CWUser X from system and ldap sources, Personne X from system source only - """ - self._test('Any X,F LIMIT 10 OFFSET 10 WHERE X firstname F', - [('UnionStep', 10, 10, [ - ('OneFetchStep', [('Any X,F WHERE X firstname F, X is CWUser', - [{'X': 'CWUser', 'F': 'String'}])], - None, None, - [self.ldap, self.system], {}, []), - ('OneFetchStep', [('Any X,F WHERE X firstname F, X is Personne', - [{'X': 'Personne', 'F': 'String'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_complex_ambigous_ordered(self): - """ - 1. retrieve CWUser X from system and ldap sources, Personne X from system source only, store - each result in the same temp table - 2. 
return content of the table sorted - """ - self._test('Any X,F ORDERBY F WHERE X firstname F', - [('AggrStep', 'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1', None, - [('FetchStep', [('Any X,F WHERE X firstname F, X is CWUser', - [{'X': 'CWUser', 'F': 'String'}])], - [self.ldap, self.system], {}, - {'X': 'table0.C0', 'X.firstname': 'table0.C1', 'F': 'table0.C1'}, []), - ('FetchStep', [('Any X,F WHERE X firstname F, X is Personne', - [{'X': 'Personne', 'F': 'String'}])], - [self.system], {}, - {'X': 'table0.C0', 'X.firstname': 'table0.C1', 'F': 'table0.C1'}, []), - ]), - ]) - - def test_complex_multiple(self): - """ - 1. retrieve Any X,A,Y,B WHERE X login A, Y login B from system and ldap sources, store - cartesian product of results into a temporary table - 2. return the result of Any X,Y WHERE X login 'syt', Y login 'adim' - on the system source - """ - ueid = self.session.user.eid - self._test('Any X,Y WHERE X login "syt", Y login "adim"', - [('FetchStep', - [('Any X WHERE X login "syt", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, - {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any Y WHERE Y login "adim", Y is CWUser', [{'Y': 'CWUser'}])], - [self.ldap, self.system], None, - {'Y': 'table1.C0'}, []), - ('OneFetchStep', - [('Any X,Y WHERE X is CWUser, Y is CWUser', [{'X': 'CWUser', 'Y': 'CWUser'}])], - None, None, [self.system], - {'X': 'table0.C0', 'Y': 'table1.C0'}, []) - ], {'x': ueid}) - - def test_complex_multiple_limit_offset(self): - """ - 1. retrieve Any X,A,Y,B WHERE X login A, Y login B from system and ldap sources, store - cartesian product of results into a temporary table - 2. return the result of Any X,Y WHERE X login 'syt', Y login 'adim' - on the system source - """ - self._test('Any X,Y LIMIT 10 OFFSET 10 WHERE X login "syt", Y login "adim"', - [('FetchStep', - [('Any X WHERE X login "syt", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any Y WHERE Y login "adim", Y is CWUser', [{'Y': 'CWUser'}])], - [self.ldap, self.system], None, {'Y': 'table1.C0'}, []), - ('OneFetchStep', - [('Any X,Y LIMIT 10 OFFSET 10 WHERE X is CWUser, Y is CWUser', [{'X': 'CWUser', 'Y': 'CWUser'}])], - 10, 10, [self.system], - {'X': 'table0.C0', 'Y': 'table1.C0'}, []) - ]) - - def test_complex_aggregat(self): - self._test('Any MAX(X)', - [('OneFetchStep', - [('Any MAX(X)', X_ALL_SOLS)], - None, None, [self.system], {}, []) - ]) - - def test_complex_typed_aggregat(self): - self._test('Any MAX(X) WHERE X is Card', - [('AggrStep', 'SELECT MAX(table0.C0) FROM table0', None, - [('FetchStep', - [('Any MAX(X) WHERE X is Card', [{'X': 'Card'}])], - [self.cards, self.system], {}, {'MAX(X)': 'table0.C0'}, []) - ]) - ]) - - def test_complex_greater_eid(self): - self._test('Any X WHERE X eid > 12', - [('OneFetchStep', - [('Any X WHERE X eid > 12', X_ALL_SOLS)], - None, None, [self.system], {}, []) - ]) - - def test_complex_greater_typed_eid(self): - self._test('Any X WHERE X eid > 12, X is Card', - [('OneFetchStep', - [('Any X WHERE X eid > 12, X is Card', [{'X': 'Card'}])], - None, None, [self.system], {}, []) - ]) - - def test_complex_optional(self): - ueid = self.session.user.eid - self._test('Any U WHERE WF wf_info_for X, X eid %(x)s, WF owned_by U?, WF from_state FS', - [('OneFetchStep', [('Any U WHERE WF wf_info_for %s, WF owned_by U?, WF from_state FS' % ueid, - [{'WF': 'TrInfo', 'FS': 'State', 'U': 'CWUser'}])], - None, None, [self.system], {}, [])], - {'x': ueid}) - - def 
test_3sources_ambigous(self): - self._test('Any X,T WHERE X owned_by U, U login "syt", X title T, X is IN(Bookmark, Card, EmailThread)', - [('FetchStep', [('Any X,T WHERE X title T, X is Card', [{'X': 'Card', 'T': 'String'}])], - [self.cards, self.system], None, - {'T': 'table0.C1', 'X': 'table0.C0', 'X.title': 'table0.C1'}, []), - ('FetchStep', [('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])], - [self.ldap, self.system], None, - {'U': 'table1.C0'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,T WHERE X owned_by U, X title T, U is CWUser, X is IN(Bookmark, EmailThread)', - [{'T': 'String', 'U': 'CWUser', 'X': 'Bookmark'}, - {'T': 'String', 'U': 'CWUser', 'X': 'EmailThread'}])], - None, None, [self.system], {'U': 'table1.C0'}, []), - ('OneFetchStep', [('Any X,T WHERE X owned_by U, X title T, U is CWUser, X is Card', - [{'X': 'Card', 'U': 'CWUser', 'T': 'String'}])], - None, None, [self.system], - {'X': 'table0.C0', 'X.title': 'table0.C1', 'T': 'table0.C1', 'U': 'table1.C0'}, []), - ]), - ]) - - def test_restricted_max(self): - # dumb query to emulate the one generated by svnfile.entities.rql_revision_content - self._test('Any V, MAX(VR) WHERE V is Card, V creation_date VR, ' - '(V creation_date TODAY OR (V creation_date < TODAY AND NOT EXISTS(' - 'X is Card, X creation_date < TODAY, X creation_date >= VR)))', - [('FetchStep', [('Any VR WHERE X creation_date < TODAY, X creation_date VR, X is Card', - [{'X': 'Card', 'VR': 'Datetime'}])], - [self.cards, self.system], None, - {'VR': 'table0.C0', 'X.creation_date': 'table0.C0'}, []), - ('FetchStep', [('Any V,VR WHERE V creation_date VR, V is Card', - [{'VR': 'Datetime', 'V': 'Card'}])], - [self.cards, self.system], None, - {'VR': 'table1.C1', 'V': 'table1.C0', 'V.creation_date': 'table1.C1'}, []), - ('OneFetchStep', [('Any V,MAX(VR) WHERE V creation_date VR, (V creation_date TODAY) OR (V creation_date < TODAY, NOT EXISTS(X creation_date >= VR, X is Card)), V is Card', - [{'X': 'Card', 'VR': 'Datetime', 'V': 'Card'}])], - None, None, [self.system], - {'VR': 'table1.C1', 'V': 'table1.C0', 'V.creation_date': 'table1.C1', 'X.creation_date': 'table0.C0'}, []) - ]) - - def test_outer_supported_rel1(self): - # both system and rql support all variables, can be - self._test('Any X, R WHERE X is Note, X in_state S, X type R, ' - 'NOT EXISTS(Y is Note, Y in_state S, Y type R, X identity Y)', - [('OneFetchStep', [('Any X,R WHERE X is Note, X in_state S, X type R, NOT EXISTS(Y is Note, Y in_state S, Y type R, X identity Y), S is State', - [{'Y': 'Note', 'X': 'Note', 'S': 'State', 'R': 'String'}])], - None, None, - [self.cards, self.system], {}, []) - ]) - - def test_not_identity(self): - ueid = self.session.user.eid - self._test('Any X WHERE NOT X identity U, U eid %s, X is CWUser' % ueid, - [('OneFetchStep', - [('Any X WHERE NOT X identity %s, X is CWUser' % ueid, [{'X': 'CWUser'}])], - None, None, - [self.ldap, self.system], {}, []) - ]) - - def test_outer_supported_rel2(self): - self._test('Any X, MAX(R) GROUPBY X WHERE X in_state S, X login R, ' - 'NOT EXISTS(Y is Note, Y in_state S, Y type R)', - [('FetchStep', [('Any A,R WHERE Y in_state A, Y type R, A is 
State, Y is Note', - [{'Y': 'Note', 'A': 'State', 'R': 'String'}])], - [self.cards, self.system], None, - {'A': 'table0.C0', 'R': 'table0.C1', 'Y.type': 'table0.C1'}, []), - ('FetchStep', [('Any X,R WHERE X login R, X is CWUser', [{'X': 'CWUser', 'R': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table1.C0', 'X.login': 'table1.C1', 'R': 'table1.C1'}, []), - ('OneFetchStep', [('Any X,MAX(R) GROUPBY X WHERE X in_state S, X login R, NOT EXISTS(Y type R, S identity A, A is State, Y is Note), S is State, X is CWUser', - [{'Y': 'Note', 'X': 'CWUser', 'S': 'State', 'R': 'String', 'A': 'State'}])], - None, None, [self.system], - {'A': 'table0.C0', 'X': 'table1.C0', 'X.login': 'table1.C1', 'R': 'table1.C1', 'Y.type': 'table0.C1'}, []) - ]) - - def test_security_has_text(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X WHERE X has_text "bla"', - [('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.cards, self.system], None, {'E': 'table0.C0'}, []), - ('UnionStep', None, None, - [('OneFetchStep', - [(u'Any X WHERE X has_text "bla", (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid}, - [{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])], - None, None, [self.system], {'E': 'table0.C0'}, []), - ('OneFetchStep', - [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is IN(Basket, CWUser)' % ueid, - [{'X': 'Basket'}, {'X': 'CWUser'}]), - ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)', - [{'X': 'Card'}, {'X': 'Comment'}, - {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, - {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, - {'X': 'SubDivision'}, {'X': 'Tag'}]),], - None, None, [self.system], {}, []), - ]) - ]) - - def test_security_has_text_limit_offset(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - # note: same as the above query but because of the subquery usage, the - # display differs (not printing solutions for each union) - self._test('Any X LIMIT 10 OFFSET 10 WHERE X has_text "bla"', - [('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.cards, self.system], None, {'E': 'table1.C0'}, []), - ('UnionFetchStep', [ - ('FetchStep', [('Any X WHERE X has_text "bla", (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid}, - [{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 
'Affaire', 'X': 'Affaire'}])], - [self.system], {'E': 'table1.C0'}, {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is IN(Basket, CWUser)' % ueid, - [{'X': 'Basket'}, {'X': 'CWUser'}]), - ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)', - [{'X': 'Card'}, {'X': 'Comment'}, - {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, - {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, - {'X': 'SubDivision'}, {'X': 'Tag'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ]), - ('OneFetchStep', - [('Any X LIMIT 10 OFFSET 10', - [{'X': 'Affaire'}, {'X': 'Basket'}, - {'X': 'CWUser'}, {'X': 'Card'}, {'X': 'Comment'}, - {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, - {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, - {'X': 'SubDivision'}, {'X': 'Tag'}])], - 10, 10, [self.system], {'X': 'table0.C0'}, []) - ]) - - def test_security_user(self): - """a guest user trying to see another user: EXISTS(X owned_by U) is automatically inserted""" - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X WHERE X login "bla"', - [('FetchStep', - [('Any X WHERE X login "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('OneFetchStep', - [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0'}, [])]) - - def test_security_complex_has_text(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X WHERE X has_text "bla", X firstname "bla"', - [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0'}, []), - ('OneFetchStep', [('Any X WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_security_complex_has_text_limit_offset(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X LIMIT 10 OFFSET 10 WHERE X has_text "bla", X firstname "bla"', - [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table1.C0'}, []), - ('UnionFetchStep', [ - ('FetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])], - [self.system], {'X': 'table1.C0'}, {'X': 'table0.C0'}, []), - ('FetchStep', [('Any X WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ]), - ('OneFetchStep', - [('Any X LIMIT 10 OFFSET 10', [{'X': 'CWUser'}, {'X': 'Personne'}])], - 10, 10, [self.system], {'X': 'table0.C0'}, []) - ]) - - def test_security_complex_aggregat(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - ALL_SOLS = X_ALL_SOLS[:] - ALL_SOLS.remove({'X': 'CWSourceHostConfig'}) # not authorized - ALL_SOLS.remove({'X': 'CWSourceSchemaConfig'}) # not authorized - 
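# guests lack read permission on these entity types, which is why they are - # pruned from the expected solutions - 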
ALL_SOLS.remove({'X': 'CWDataImport'}) # not authorized - ALL_SOLS.remove({'X': 'Frozable'}) # not authorized - self._test('Any MAX(X)', - [('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.cards, self.system], None, {'E': 'table1.C0'}, []), - ('FetchStep', [('Any X WHERE X is IN(CWUser)', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table2.C0'}, []), - ('UnionFetchStep', [ - ('FetchStep', [('Any X WHERE EXISTS(%s use_email X), X is EmailAddress' % ueid, - [{'X': 'EmailAddress'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ('UnionFetchStep', - [('FetchStep', [('Any X WHERE X is IN(Card, Note, State)', - [{'X': 'Card'}, {'X': 'Note'}, {'X': 'State'}])], - [self.cards, self.system], {}, {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, Comment, Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, Old, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', - [{'X': 'BaseTransition'}, {'X': 'Bookmark'}, - {'X': 'CWAttribute'}, {'X': 'CWCache'}, - {'X': 'CWConstraint'}, {'X': 'CWConstraintType'}, - {'X': 'CWEType'}, {'X': 'CWGroup'}, - {'X': 'CWPermission'}, {'X': 'CWProperty'}, - {'X': 'CWRType'}, {'X': 'CWRelation'}, - {'X': 'CWSource'}, - {'X': 'CWUniqueTogetherConstraint'}, - {'X': 'Comment'}, {'X': 'Division'}, - {'X': 'Email'}, - {'X': 'EmailPart'}, {'X': 'EmailThread'}, - {'X': 'ExternalUri'}, {'X': 'File'}, - {'X': 'Folder'}, {'X': 'Old'}, - {'X': 'Personne'}, {'X': 'RQLExpression'}, - {'X': 'Societe'}, {'X': 'SubDivision'}, - {'X': 'SubWorkflowExitPoint'}, {'X': 'Tag'}, - {'X': 'TrInfo'}, {'X': 'Transition'}, - {'X': 'Workflow'}, {'X': 'WorkflowTransition'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ]), - ('FetchStep', [('Any X WHERE (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid}, - [{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])], - [self.system], {'E': 'table1.C0'}, {'X': 'table0.C0'}, []), - ('UnionFetchStep', [ - ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is Basket' % ueid, - [{'X': 'Basket'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, - [{'X': 'CWUser'}])], - [self.system], {'X': 'table2.C0'}, {'X': 'table0.C0'}, []), - ]), - ]), - ('OneFetchStep', [('Any MAX(X)', ALL_SOLS)], - None, None, [self.system], {'X': 'table0.C0'}, []) - ]) - - def test_security_complex_aggregat2(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - X_ET_ALL_SOLS = [] - for s in X_ALL_SOLS: - if s in ({'X': 'CWSourceHostConfig'}, {'X': 'CWSourceSchemaConfig'}, - {'X': 'CWDataImport'}, {'X': 'Frozable'}): - continue # not authorized - ets = {'ET': 'CWEType'} - ets.update(s) - X_ET_ALL_SOLS.append(ets) - 
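# reading guide (informal, inferred from the fixtures in this module rather - # than from the planner's API): expected steps decode roughly as - #   ('FetchStep', [(rql, solutions)], sources, input map, output map, substeps) - #   ('OneFetchStep', [(rql, solutions)], limit, offset, sources, input map, substeps) - # where the maps bind RQL variables to temporary-table columns like 'table0.C0' - 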
self._test('Any ET, COUNT(X) GROUPBY ET ORDERBY ET WHERE X is ET', - [('FetchStep', [('Any X WHERE X is IN(Card, Note, State)', - [{'X': 'Card'}, {'X': 'Note'}, {'X': 'State'}])], - [self.cards, self.system], None, {'X': 'table1.C0'}, []), - ('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.cards, self.system], None, {'E': 'table2.C0'}, []), - ('FetchStep', [('Any X WHERE X is IN(CWUser)', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table3.C0'}, []), - ('UnionFetchStep', - [('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(%s use_email X), ET is CWEType, X is EmailAddress' % ueid, - [{'ET': 'CWEType', 'X': 'EmailAddress'}]), - ], - [self.system], {}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), - # extra UnionFetchStep could be avoided but has no cost, so don't care - ('UnionFetchStep', - [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, Comment, Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, Old, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', - [{'X': 'BaseTransition', 'ET': 'CWEType'}, - {'X': 'Bookmark', 'ET': 'CWEType'}, {'X': 'CWAttribute', 'ET': 'CWEType'}, - {'X': 'CWCache', 'ET': 'CWEType'}, {'X': 'CWConstraint', 'ET': 'CWEType'}, - {'X': 'CWConstraintType', 'ET': 'CWEType'}, - {'X': 'CWEType', 'ET': 'CWEType'}, - {'X': 'CWGroup', 'ET': 'CWEType'}, {'X': 'CWPermission', 'ET': 'CWEType'}, - {'X': 'CWProperty', 'ET': 'CWEType'}, {'X': 'CWRType', 'ET': 'CWEType'}, - {'X': 'CWSource', 'ET': 'CWEType'}, - {'X': 'CWRelation', 'ET': 'CWEType'}, - {'X': 'CWUniqueTogetherConstraint', 'ET': 'CWEType'}, - {'X': 'Comment', 'ET': 'CWEType'}, - {'X': 'Division', 'ET': 'CWEType'}, {'X': 'Email', 'ET': 'CWEType'}, - {'X': 'EmailPart', 'ET': 'CWEType'}, - {'X': 'EmailThread', 'ET': 'CWEType'}, {'X': 'ExternalUri', 'ET': 'CWEType'}, - {'X': 'File', 'ET': 'CWEType'}, {'X': 'Folder', 'ET': 'CWEType'}, - {'X': 'Old', 'ET': 'CWEType'}, {'X': 'Personne', 'ET': 'CWEType'}, - {'X': 'RQLExpression', 'ET': 'CWEType'}, {'X': 'Societe', 'ET': 'CWEType'}, - {'X': 'SubDivision', 'ET': 'CWEType'}, {'X': 'SubWorkflowExitPoint', 'ET': 'CWEType'}, - {'X': 'Tag', 'ET': 'CWEType'}, {'X': 'TrInfo', 'ET': 'CWEType'}, - {'X': 'Transition', 'ET': 'CWEType'}, {'X': 'Workflow', 'ET': 'CWEType'}, - {'X': 'WorkflowTransition', 'ET': 'CWEType'}])], - [self.system], {}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), - ('FetchStep', - [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(Card, Note, State)', - [{'ET': 'CWEType', 'X': 'Card'}, - {'ET': 'CWEType', 'X': 'Note'}, - {'ET': 'CWEType', 'X': 'State'}])], - [self.system], {'X': 'table1.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), - ]), - - ('FetchStep', [('Any ET,X WHERE X is ET, (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), ET is CWEType, X is Affaire' % {'ueid': ueid}, - [{'C': 'Division', 'E': 'Note', 'D': 'Affaire', - 'G': 'SubDivision', 'F': 
'Societe', 'I': 'Affaire', - 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire', - 'ET': 'CWEType'}])], - [self.system], {'E': 'table2.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'}, - []), - ('UnionFetchStep', [ - ('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by %s), ET is CWEType, X is Basket' % ueid, - [{'ET': 'CWEType', 'X': 'Basket'}])], - [self.system], {}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), - ('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by %s), ET is CWEType, X is CWUser' % ueid, - [{'ET': 'CWEType', 'X': 'CWUser'}])], - [self.system], {'X': 'table3.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), - ]), - ]), - ('OneFetchStep', - [('Any ET,COUNT(X) GROUPBY ET ORDERBY ET', X_ET_ALL_SOLS)], - None, None, [self.system], {'ET': 'table0.C0', 'X': 'table0.C1'}, []) - ]) - - def test_security_3sources(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X, XT WHERE X is Card, X owned_by U, X title XT, U login "syt"', - [('FetchStep', - [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), - ('FetchStep', - [('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])], - [self.ldap, self.system], None, {'U': 'table1.C0'}, []), - ('OneFetchStep', - [('Any X,XT WHERE X owned_by U, X title XT, EXISTS(U owned_by %s), U is CWUser, X is Card' % ueid, - [{'X': 'Card', 'U': 'CWUser', 'XT': 'String'}])], - None, None, [self.system], - {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1', 'U': 'table1.C0'}, []) - ]) - - def test_security_3sources_identity(self): - self.restore_orig_cwuser_security() - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X, XT WHERE X is Card, X owned_by U, X title XT, U login "syt"', - [('FetchStep', - [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), - ('OneFetchStep', - [('Any X,XT WHERE X owned_by U, X title XT, U login "syt", EXISTS(U identity %s), U is CWUser, X is Card' % ueid, - [{'U': 'CWUser', 'X': 'Card', 'XT': 'String'}])], - None, None, [self.system], {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []) - ]) - - def test_security_3sources_identity_optional_var(self): - self.restore_orig_cwuser_security() - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X,XT,U WHERE X is Card, X owned_by U?, X title XT, U login L', - [('FetchStep', - [('Any U,L WHERE U login L, EXISTS(U identity %s), U is CWUser' % ueid, - [{'L': 'String', u'U': 'CWUser'}])], - [self.system], {}, {'L': 'table0.C1', 'U': 'table0.C0', 'U.login': 'table0.C1'}, []), - ('FetchStep', - [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.cards, self.system], None, {'X': 'table1.C0', 'X.title': 'table1.C1', 'XT': 'table1.C1'}, []), - ('OneFetchStep', - [('Any X,XT,U WHERE X owned_by U?, X title XT, X is Card', - [{'X': 'Card', 'U': 'CWUser', 'XT': 'String'}])], - None, None, [self.system], {'L': 'table0.C1', - 'U': 'table0.C0', - 'X': 'table1.C0', - 'X.title': 'table1.C1', - 'XT': 'table1.C1'}, []) - ]) - - def test_security_3sources_limit_offset(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid 
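- # LIMIT/OFFSET belong on the final OneFetchStep only: each source returns - # partial results, so pruning rows any earlier could drop valid ones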
- self._test('Any X, XT LIMIT 10 OFFSET 10 WHERE X is Card, X owned_by U, X title XT, U login "syt"', - [('FetchStep', - [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), - ('FetchStep', - [('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])], - [self.ldap, self.system], None, {'U': 'table1.C0'}, []), - ('OneFetchStep', - [('Any X,XT LIMIT 10 OFFSET 10 WHERE X owned_by U, X title XT, EXISTS(U owned_by %s), U is CWUser, X is Card' % ueid, - [{'X': 'Card', 'U': 'CWUser', 'XT': 'String'}])], - 10, 10, [self.system], - {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1', 'U': 'table1.C0'}, []) - ]) - - def test_exists_base(self): - self._test('Any X,L,S WHERE X in_state S, X login L, EXISTS(X in_group G, G name "bougloup")', - [('FetchStep', [('Any X,L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, []), - ('OneFetchStep', [("Any X,L,S WHERE X in_state S, X login L, " - 'EXISTS(X in_group G, G name "bougloup", G is CWGroup), S is State, X is CWUser', - [{'X': 'CWUser', 'L': 'String', 'S': 'State', 'G': 'CWGroup'}])], - None, None, [self.system], - {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, [])]) - - def test_exists_complex(self): - self._test('Any G WHERE X in_group G, G name "managers", EXISTS(X copain T, T login in ("comme", "cochon"))', - [('FetchStep', [('Any T WHERE T login IN("comme", "cochon"), T is CWUser', [{'T': 'CWUser'}])], - [self.ldap, self.system], None, {'T': 'table0.C0'}, []), - ('OneFetchStep', - [('Any G WHERE X in_group G, G name "managers", EXISTS(X copain T, T is CWUser), G is CWGroup, X is CWUser', - [{'X': 'CWUser', 'T': 'CWUser', 'G': 'CWGroup'}])], - None, None, [self.system], {'T': 'table0.C0'}, [])]) - - def test_exists3(self): - self._test('Any G,L WHERE X in_group G, X login L, G name "managers", EXISTS(X copain T, T login in ("comme", "cochon"))', - [('FetchStep', - [('Any T WHERE T login IN("comme", "cochon"), T is CWUser', - [{'T': 'CWUser'}])], - [self.ldap, self.system], None, {'T': 'table0.C0'}, []), - ('FetchStep', - [('Any L,X WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table1.C1', 'X.login': 'table1.C0', 'L': 'table1.C0'}, []), - ('OneFetchStep', - [('Any G,L WHERE X in_group G, X login L, G name "managers", EXISTS(X copain T, T is CWUser), G is CWGroup, X is CWUser', - [{'G': 'CWGroup', 'L': 'String', 'T': 'CWUser', 'X': 'CWUser'}])], - None, None, - [self.system], {'T': 'table0.C0', 'X': 'table1.C1', 'X.login': 'table1.C0', 'L': 'table1.C0'}, [])]) - - def test_exists4(self): - self._test('Any G,L WHERE X in_group G, X login L, G name "managers", ' - 'EXISTS(X copain T, T login L, T login in ("comme", "cochon")) OR ' - 'EXISTS(X in_state S, S name "pascontent", NOT X copain T2, T2 login "billy")', - [('FetchStep', - [('Any T,L WHERE T login L, T login IN("comme", "cochon"), T is CWUser', [{'T': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'T': 'table0.C0', 'T.login': 'table0.C1', 'L': 'table0.C1'}, []), - ('FetchStep', - [('Any T2 WHERE T2 login "billy", T2 is CWUser', [{'T2': 'CWUser'}])], - [self.ldap, self.system], None, {'T2': 'table1.C0'}, []), - ('FetchStep', - [('Any L,X WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, {'X': 
'table2.C1', 'X.login': 'table2.C0', 'L': 'table2.C0'}, []), - ('OneFetchStep', - [('Any G,L WHERE X in_group G, X login L, G name "managers", (EXISTS(X copain T, T login L, T is CWUser)) OR (EXISTS(X in_state S, S name "pascontent", NOT EXISTS(X copain T2), S is State)), G is CWGroup, T2 is CWUser, X is CWUser', - [{'G': 'CWGroup', 'L': 'String', 'S': 'State', 'T': 'CWUser', 'T2': 'CWUser', 'X': 'CWUser'}])], - None, None, [self.system], - {'T2': 'table1.C0', 'L': 'table2.C0', - 'T': 'table0.C0', 'T.login': 'table0.C1', 'X': 'table2.C1', 'X.login': 'table2.C0'}, [])]) - - def test_exists5(self): - self._test('Any GN,L WHERE X in_group G, X login L, G name GN, ' - 'EXISTS(X copain T, T login in ("comme", "cochon")) AND ' - 'NOT EXISTS(X copain T2, T2 login "billy")', - [('FetchStep', [('Any T WHERE T login IN("comme", "cochon"), T is CWUser', - [{'T': 'CWUser'}])], - [self.ldap, self.system], None, {'T': 'table0.C0'}, []), - ('FetchStep', [('Any T2 WHERE T2 login "billy", T2 is CWUser', [{'T2': 'CWUser'}])], - [self.ldap, self.system], None, {'T2': 'table1.C0'}, []), - ('FetchStep', [('Any L,X WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table2.C1', 'X.login': 'table2.C0', 'L': 'table2.C0'}, []), - ('OneFetchStep', [('Any GN,L WHERE X in_group G, X login L, G name GN, EXISTS(X copain T, T is CWUser), NOT EXISTS(X copain T2, T2 is CWUser), G is CWGroup, X is CWUser', - [{'G': 'CWGroup', 'GN': 'String', 'L': 'String', 'T': 'CWUser', 'T2': 'CWUser', 'X': 'CWUser'}])], - None, None, [self.system], - {'T': 'table0.C0', 'T2': 'table1.C0', - 'X': 'table2.C1', 'X.login': 'table2.C0', 'L': 'table2.C0'}, [])]) - - def test_exists_security_no_invariant(self): - ueid = self.session.user.eid - self._test('Any X,AA,AB,AC,AD ORDERBY AA WHERE X is CWUser, X login AA, X firstname AB, X surname AC, X modification_date AD, A eid %(B)s, \ - EXISTS(((X identity A) OR \ - (EXISTS(X in_group C, C name IN("managers", "staff"), C is CWGroup))) OR \ - (EXISTS(X in_group D, A in_group D, NOT D name "users", D is CWGroup)))', - [('FetchStep', [('Any X,AA,AB,AC,AD WHERE X login AA, X firstname AB, X surname AC, X modification_date AD, X is CWUser', - [{'AA': 'String', 'AB': 'String', 'AC': 'String', 'AD': 'Datetime', - 'X': 'CWUser'}])], - [self.ldap, self.system], None, {'AA': 'table0.C1', 'AB': 'table0.C2', - 'AC': 'table0.C3', 'AD': 'table0.C4', - 'X': 'table0.C0', - 'X.firstname': 'table0.C2', - 'X.login': 'table0.C1', - 'X.modification_date': 'table0.C4', - 'X.surname': 'table0.C3'}, []), - ('OneFetchStep', [('Any X,AA,AB,AC,AD ORDERBY AA WHERE X login AA, X firstname AB, X surname AC, X modification_date AD, EXISTS(((X identity %(ueid)s) OR (EXISTS(X in_group C, C name IN("managers", "staff"), C is CWGroup))) OR (EXISTS(X in_group D, %(ueid)s in_group D, NOT D name "users", D is CWGroup))), X is CWUser' % {'ueid': ueid}, - [{'AA': 'String', 'AB': 'String', 'AC': 'String', 'AD': 'Datetime', - 'C': 'CWGroup', 'D': 'CWGroup', 'X': 'CWUser'}])], - None, None, [self.system], - {'AA': 'table0.C1', 'AB': 'table0.C2', 'AC': 'table0.C3', 'AD': 'table0.C4', - 'X': 'table0.C0', - 'X.firstname': 'table0.C2', 'X.login': 'table0.C1', 'X.modification_date': 'table0.C4', 'X.surname': 'table0.C3'}, - [])], - {'B': ueid}) - - def test_relation_need_split(self): - self._test('Any X, S WHERE X in_state S', - [('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,S WHERE X in_state S, S is State, X is IN(Affaire, CWUser)', - [{'X': 'Affaire', 'S': 'State'}, {'X': 
'CWUser', 'S': 'State'}])], - None, None, [self.system], {}, []), - ('OneFetchStep', [('Any X,S WHERE X in_state S, S is State, X is Note', - [{'X': 'Note', 'S': 'State'}])], - None, None, [self.cards, self.system], {}, []), - ])]) - - def test_relation_selection_need_split(self): - self._test('Any X,S,U WHERE X in_state S, X todo_by U', - [('FetchStep', [('Any X,S WHERE X in_state S, S is State, X is Note', - [{'X': 'Note', 'S': 'State'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'S': 'table0.C1'}, []), - ('UnionStep', None, None, - [('OneFetchStep', [('Any X,S,U WHERE X in_state S, X todo_by U, S is State, U is Personne, X is Affaire', - [{'X': 'Affaire', 'S': 'State', 'U': 'Personne'}])], - None, None, [self.system], {}, []), - ('OneFetchStep', [('Any X,S,U WHERE X todo_by U, S is State, U is CWUser, X is Note', - [{'X': 'Note', 'S': 'State', 'U': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0', 'S': 'table0.C1'}, []), - ]) - ]) - - def test_relation_restriction_need_split(self): - self._test('Any X,U WHERE X in_state S, S name "pending", X todo_by U', - [('FetchStep', [('Any X WHERE X in_state S, S name "pending", S is State, X is Note', - [{'X': 'Note', 'S': 'State'}])], - [self.cards, self.system], None, {'X': 'table0.C0'}, []), - ('UnionStep', None, None, - [('OneFetchStep', [('Any X,U WHERE X todo_by U, U is CWUser, X is Note', - [{'X': 'Note', 'U': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0'}, []), - ('OneFetchStep', [('Any X,U WHERE X in_state S, S name "pending", X todo_by U, S is State, U is Personne, X is Affaire', - [{'S': 'State', 'U': 'Personne', 'X': 'Affaire'}])], - None, None, [self.system], {}, []) - ]) - ]) - - def test_relation_restriction_ambigous_need_split(self): - self._test('Any X,T WHERE X in_state S, S name "pending", T tags X', - [('FetchStep', [('Any X WHERE X in_state S, S name "pending", S is State, X is Note', - [{'X': 'Note', 'S': 'State'}])], - [self.cards, self.system], None, {'X': 'table0.C0'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,T WHERE T tags X, T is Tag, X is Note', - [{'X': 'Note', 'T': 'Tag'}])], - None, None, - [self.system], {'X': 'table0.C0'}, []), - ('OneFetchStep', [('Any X,T WHERE X in_state S, S name "pending", T tags X, S is State, T is Tag, X is IN(Affaire, CWUser)', - [{'X': 'Affaire', 'S': 'State', 'T': 'Tag'}, - {'X': 'CWUser', 'S': 'State', 'T': 'Tag'}])], - None, None, - [self.system], {}, []), - ]) - ]) - - def test_not_relation_no_split_internal(self): - ueid = self.session.user.eid - # NOT on a relation supported by rql and system source: we want to get - # all states (eg from both sources) which are not related to entity with the - # given eid. The "NOT X in_state S, X eid %(x)s" expression is necessarily true - # in the source where %(x)s is not coming from and will be removed during rql - # generation for the external source - self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN', - [('OneFetchStep', [('Any SN WHERE NOT EXISTS(%s in_state S), S name SN, S is State' % ueid, - [{'S': 'State', 'SN': 'String'}])], - None, None, [self.cards, self.system], {}, [])], - {'x': ueid}) - - def test_not_relation_no_split_external(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - # similar to the above test but with an eid coming from the external source. 
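- # (the _type_source_cache entry above is how the test declares which - # source the eid belongs to)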
- # the same plan may be used, since we won't find any record in the system source - # linking 999999 to a state - self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN', - [('OneFetchStep', [('Any SN WHERE NOT EXISTS(999999 in_state S), S name SN, S is State', - [{'S': 'State', 'SN': 'String'}])], - None, None, [self.cards, self.system], {}, [])], - {'x': 999999}) - - def test_not_relation_need_split(self): - self._test('Any SN WHERE NOT X in_state S, S name SN', - [('FetchStep', [('Any SN,S WHERE S name SN, S is State', - [{'S': 'State', 'SN': 'String'}])], - [self.cards, self.system], None, {'S': 'table0.C1', 'S.name': 'table0.C0', 'SN': 'table0.C0'}, - []), - ('IntersectStep', None, None, - [('OneFetchStep', - [('Any SN WHERE NOT EXISTS(X in_state S, X is Note), S name SN, S is State', - [{'S': 'State', 'SN': 'String', 'X': 'Note'}])], - None, None, [self.cards, self.system], {}, - []), - ('OneFetchStep', - [('Any SN WHERE NOT EXISTS(X in_state S, X is IN(Affaire, CWUser)), S name SN, S is State', - [{'S': 'State', 'SN': 'String', 'X': 'Affaire'}, - {'S': 'State', 'SN': 'String', 'X': 'CWUser'}])], - None, None, [self.system], {'S': 'table0.C1', 'S.name': 'table0.C0', 'SN': 'table0.C0'}, - []),] - )]) - - def test_external_attributes_and_relation(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any A,B,C,D WHERE A eid %(x)s,A creation_date B,A modification_date C, A todo_by D?', - [('FetchStep', [('Any A,B,C WHERE A eid 999999, A creation_date B, A modification_date C, A is Note', - [{'A': 'Note', 'C': 'Datetime', 'B': 'Datetime'}])], - [self.cards], None, - {'A': 'table0.C0', 'A.creation_date': 'table0.C1', 'A.modification_date': 'table0.C2', 'C': 'table0.C2', 'B': 'table0.C1'}, []), - #('FetchStep', [('Any D WHERE D is CWUser', [{'D': 'CWUser'}])], - # [self.ldap, self.system], None, {'D': 'table1.C0'}, []), - ('OneFetchStep', [('Any A,B,C,D WHERE A creation_date B, A modification_date C, A todo_by D?, A is Note, D is CWUser', - [{'A': 'Note', 'C': 'Datetime', 'B': 'Datetime', 'D': 'CWUser'}])], - None, None, [self.system], - {'A': 'table0.C0', 'A.creation_date': 'table0.C1', 'A.modification_date': 'table0.C2', 'C': 'table0.C2', 'B': 'table0.C1'}, [])], - {'x': 999999}) - - - def test_simplified_var_1(self): - ueid = self.session.user.eid - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - # need access to cards source since X table has to be accessed because of the outer join - self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR ' - '(X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s', - [('FetchStep', - [('Any 999999', [{}])], [self.cards], - None, {u'%(x)s': 'table0.C0'}, []), - ('OneFetchStep', - [(u'Any 6 WHERE 6 in_group G, (G name IN("managers", "logilab")) OR ' - '(X require_permission P?, P name "bla", P require_group G), ' - 'G is CWGroup, P is CWPermission, X is Note', - [{'G': 'CWGroup', 'P': 'CWPermission', 'X': 'Note'}])], - None, None, [self.system], {u'%(x)s': 'table0.C0'}, [])], - {'x': 999999, 'u': ueid}) - - def test_simplified_var_2(self): - ueid = self.session.user.eid - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - # no need to access the source since X is invariant - self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR ' - '(X require_permission P, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s', - [('OneFetchStep', [('Any %s WHERE %s in_group G, (G name IN("managers", 
"logilab")) OR (999999 require_permission P, P name "bla", P require_group G)' % (ueid, ueid), - [{'G': 'CWGroup', 'P': 'CWPermission'}])], - None, None, [self.system], {}, [])], - {'x': 999999, 'u': ueid}) - - def test_has_text(self): - self._test('Card X WHERE X has_text "toto"', - [('OneFetchStep', [('Any X WHERE X has_text "toto", X is Card', - [{'X': 'Card'}])], - None, None, [self.system], {}, [])]) - - def test_has_text_3(self): - self._test('Any X WHERE X has_text "toto", X title "zoubidou", X is IN (Card, EmailThread)', - [('FetchStep', [(u'Any X WHERE X title "zoubidou", X is Card', - [{'X': 'Card'}])], - [self.cards, self.system], None, {'X': 'table0.C0'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [(u'Any X WHERE X has_text "toto", X is Card', - [{'X': 'Card'}])], - None, None, [self.system], {'X': 'table0.C0'}, []), - ('OneFetchStep', [(u'Any X WHERE X has_text "toto", X title "zoubidou", X is EmailThread', - [{'X': 'EmailThread'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_has_text_orderby_rank(self): - self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', - [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('AggrStep', 'SELECT table1.C1 FROM table1\nORDER BY table1.C0', None, [ - ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X is CWUser', - [{'X': 'CWUser'}])], - [self.system], {'X': 'table0.C0'}, {'FTIRANK(X)': 'table1.C0', 'X': 'table1.C1'}, []), - ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X firstname "bla", X is Personne', - [{'X': 'Personne'}])], - [self.system], {}, {'FTIRANK(X)': 'table1.C0', 'X': 'table1.C1'}, []), - ]), - ]) - - def test_security_has_text_orderby_rank(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', - [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table1.C0'}, []), - ('UnionFetchStep', - [('FetchStep', [('Any X WHERE X firstname "bla", X is Personne', [{'X': 'Personne'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])], - [self.system], {'X': 'table1.C0'}, {'X': 'table0.C0'}, [])]), - ('OneFetchStep', [('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla"', - [{'X': 'CWUser'}, {'X': 'Personne'}])], - None, None, [self.system], {'X': 'table0.C0'}, []), - ]) - - def test_has_text_select_rank(self): - self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', - # XXX unecessary duplicate selection - [('FetchStep', [('Any X,X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C1'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X is CWUser', [{'X': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C1'}, []), - ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_security_has_text_select_rank(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', - [('FetchStep', [('Any X,X WHERE X 
firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C1'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C1'}, []), - ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_sort_func(self): - self._test('Note X ORDERBY DUMB_SORT(RF) WHERE X type RF', - [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY DUMB_SORT(table0.C1)', None, [ - ('FetchStep', [('Any X,RF WHERE X type RF, X is Note', - [{'X': 'Note', 'RF': 'String'}])], - [self.cards, self.system], {}, {'X': 'table0.C0', 'X.type': 'table0.C1', 'RF': 'table0.C1'}, []), - ]) - ]) - - def test_ambigous_sort_func(self): - self._test('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF, X is IN (Bookmark, Card, EmailThread)', - [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY DUMB_SORT(table0.C1)', None, - [('FetchStep', [('Any X,RF WHERE X title RF, X is Card', - [{'X': 'Card', 'RF': 'String'}])], - [self.cards, self.system], {}, - {'X': 'table0.C0', 'X.title': 'table0.C1', 'RF': 'table0.C1'}, []), - ('FetchStep', [('Any X,RF WHERE X title RF, X is IN(Bookmark, EmailThread)', - [{'RF': 'String', 'X': 'Bookmark'}, - {'RF': 'String', 'X': 'EmailThread'}])], - [self.system], {}, - {'X': 'table0.C0', 'X.title': 'table0.C1', 'RF': 'table0.C1'}, []), - ]), - ]) - - def test_attr_unification_1(self): - self._test('Any X,Y WHERE X is Bookmark, Y is Card, X title T, Y title T', - [('FetchStep', - [('Any Y,T WHERE Y title T, Y is Card', [{'T': 'String', 'Y': 'Card'}])], - [self.cards, self.system], None, - {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.title': 'table0.C1'}, []), - ('OneFetchStep', - [('Any X,Y WHERE X title T, Y title T, X is Bookmark, Y is Card', - [{'T': 'String', 'X': 'Bookmark', 'Y': 'Card'}])], - None, None, [self.system], - {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.title': 'table0.C1'}, []) - ]) - - def test_attr_unification_2(self): - self._test('Any X,Y WHERE X is Note, Y is Card, X type T, Y title T', - [('FetchStep', - [('Any X,T WHERE X type T, X is Note', [{'T': 'String', 'X': 'Note'}])], - [self.cards, self.system], None, - {'T': 'table0.C1', 'X': 'table0.C0', 'X.type': 'table0.C1'}, []), - ('FetchStep', - [('Any Y,T WHERE Y title T, Y is Card', [{'T': 'String', 'Y': 'Card'}])], - [self.cards, self.system], None, - {'T': 'table1.C1', 'Y': 'table1.C0', 'Y.title': 'table1.C1'}, []), - ('OneFetchStep', - [('Any X,Y WHERE X type T, Y title T, X is Note, Y is Card', - [{'T': 'String', 'X': 'Note', 'Y': 'Card'}])], - None, None, [self.system], - {'T': 'table1.C1', - 'X': 'table0.C0', 'X.type': 'table0.C1', - 'Y': 'table1.C0', 'Y.title': 'table1.C1'}, []) - ]) - - def test_attr_unification_neq_1(self): - self._test('Any X,Y WHERE X is Bookmark, Y is Card, X creation_date D, Y creation_date > D', - [('FetchStep', - [('Any Y,D WHERE Y creation_date D, Y is Card', - [{'D': 'Datetime', 'Y': 'Card'}])], - [self.cards,self.system], None, - {'D': 'table0.C1', 'Y': 'table0.C0', 'Y.creation_date': 'table0.C1'}, []), - ('OneFetchStep', - [('Any X,Y WHERE X creation_date D, Y creation_date > D, X is Bookmark, Y is Card', - [{'D': 'Datetime', 'X': 'Bookmark', 'Y': 'Card'}])], None, None, - [self.system], - {'D': 'table0.C1', 'Y': 'table0.C0', 'Y.creation_date': 'table0.C1'}, []) - ]) - - def test_subquery_1(self): - 
ueid = self.session.user.eid - self._test('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by D), D eid %(E)s ' - 'WITH A,N BEING ((Any X,N WHERE X is Tag, X name N) UNION (Any X,T WHERE X is Bookmark, X title T))', - [('FetchStep', [('Any X,N WHERE X is Tag, X name N', [{'N': 'String', 'X': 'Tag'}]), - ('Any X,T WHERE X is Bookmark, X title T', - [{'T': 'String', 'X': 'Bookmark'}])], - [self.system], {}, {'N': 'table0.C1', 'X': 'table0.C0', 'X.name': 'table0.C1'}, []), - ('FetchStep', - [('Any B,C WHERE B login C, B is CWUser', [{'B': 'CWUser', 'C': 'String'}])], - [self.ldap, self.system], None, {'B': 'table1.C0', 'B.login': 'table1.C1', 'C': 'table1.C1'}, []), - ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by %s), B is CWUser, A is IN(Bookmark, Tag)' % ueid, - [{'A': 'Bookmark', 'B': 'CWUser', 'C': 'String'}, - {'A': 'Tag', 'B': 'CWUser', 'C': 'String'}])], - None, None, [self.system], - {'A': 'table0.C0', - 'B': 'table1.C0', 'B.login': 'table1.C1', - 'C': 'table1.C1', - 'N': 'table0.C1'}, - [])], - {'E': ueid}) - - def test_subquery_2(self): - ueid = self.session.user.eid - self._test('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by D), D eid %(E)s ' - 'WITH A,N BEING ((Any X,N WHERE X is Tag, X name N) UNION (Any X,T WHERE X is Card, X title T))', - [('UnionFetchStep', - [('FetchStep', [('Any X,N WHERE X is Tag, X name N', [{'N': 'String', 'X': 'Tag'}])], - [self.system], {}, - {'N': 'table0.C1', - 'T': 'table0.C1', - 'X': 'table0.C0', - 'X.name': 'table0.C1', - 'X.title': 'table0.C1'}, []), - ('FetchStep', [('Any X,T WHERE X is Card, X title T', - [{'T': 'String', 'X': 'Card'}])], - [self.cards, self.system], {}, - {'N': 'table0.C1', - 'T': 'table0.C1', - 'X': 'table0.C0', - 'X.name': 'table0.C1', - 'X.title': 'table0.C1'}, []), - ]), - ('FetchStep', - [('Any B,C WHERE B login C, B is CWUser', [{'B': 'CWUser', 'C': 'String'}])], - [self.ldap, self.system], None, {'B': 'table1.C0', 'B.login': 'table1.C1', 'C': 'table1.C1'}, []), - ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by %s), B is CWUser, A is IN(Card, Tag)' % ueid, - [{'A': 'Card', 'B': 'CWUser', 'C': 'String'}, - {'A': 'Tag', 'B': 'CWUser', 'C': 'String'}])], - None, None, [self.system], - {'A': 'table0.C0', - 'B': 'table1.C0', 'B.login': 'table1.C1', - 'C': 'table1.C1', - 'N': 'table0.C1'}, - [])], - {'E': ueid}) - - def test_eid_dont_cross_relation_1(self): - repo._type_source_cache[999999] = ('Personne', 'system', 999999, 'system') - self._test('Any Y,YT WHERE X eid %(x)s, X fiche Y, Y title YT', - [('OneFetchStep', [('Any Y,YT WHERE X eid 999999, X fiche Y, Y title YT', - [{'X': 'Personne', 'Y': 'Card', 'YT': 'String'}])], - None, None, [self.system], {}, [])], - {'x': 999999}) - - def test_eid_dont_cross_relation_2(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.cards.dont_cross_relations.add('concerne') - try: - self._test('Any Y,S,YT,X WHERE Y concerne X, Y in_state S, X eid 999999, Y ref YT', - [('OneFetchStep', [('Any Y,S,YT,999999 WHERE Y concerne 999999, Y in_state S, Y ref YT', - [{'Y': 'Affaire', 'YT': 'String', 'S': 'State'}])], - None, None, [self.system], {}, [])], - {'x': 999999}) - finally: - self.cards.dont_cross_relations.remove('concerne') - - - # external source w/ .cross_relations == ['multisource_crossed_rel'] ###### - - def test_crossed_relation_eid_1_invariant(self): - repo._type_source_cache[999999] = 
('Note', 'system', 999999, 'system') - self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y', - [('OneFetchStep', [('Any Y WHERE 999999 multisource_crossed_rel Y', [{u'Y': 'Note'}])], - None, None, [self.system], {}, []) - ], - {'x': 999999,}) - - def test_crossed_relation_eid_1_needattr(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T', - [('FetchStep', [('Any Y,T WHERE Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])], - [self.cards, self.system], None, - {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.type': 'table0.C1'}, []), - ('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note', - [{'T': 'String', 'Y': 'Note'}])], - None, None, [self.system], - {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.type': 'table0.C1'}, []), - ], - {'x': 999999,}) - - def test_crossed_relation_eid_2_invariant(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y', - [('OneFetchStep', [('Any Y WHERE 999999 multisource_crossed_rel Y, Y is Note', [{'Y': 'Note'}])], - None, None, [self.cards, self.system], {}, []) - ], - {'x': 999999,}) - - def test_crossed_relation_eid_2_needattr(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T', - [('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note', - [{'T': 'String', 'Y': 'Note'}])], - None, None, [self.cards, self.system], {}, - []), - ], - {'x': 999999,}) - - def test_crossed_relation_eid_not_1(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any Y WHERE X eid %(x)s, NOT X multisource_crossed_rel Y', - [('FetchStep', [('Any Y WHERE Y is Note', [{'Y': 'Note'}])], - [self.cards, self.system], None, {'Y': 'table0.C0'}, []), - ('OneFetchStep', [('Any Y WHERE NOT EXISTS(999999 multisource_crossed_rel Y), Y is Note', - [{'Y': 'Note'}])], - None, None, [self.system], - {'Y': 'table0.C0'}, [])], - {'x': 999999,}) - -# def test_crossed_relation_eid_not_2(self): -# repo._type_source_cache[999999] = ('Note', 'cards', 999999) -# self._test('Any Y WHERE X eid %(x)s, NOT X multisource_crossed_rel Y', -# [], -# {'x': 999999,}) - - def test_crossed_relation_base_XXXFIXME(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T', - [('FetchStep', [('Any X,T WHERE X type T, X is Note', [{'T': 'String', 'X': 'Note'}])], - [self.cards, self.system], None, - {'T': 'table0.C1', 'X': 'table0.C0', 'X.type': 'table0.C1'}, []), - ('FetchStep', [('Any Y,T WHERE Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])], - [self.cards, self.system], None, - {'T': 'table1.C1', 'Y': 'table1.C0', 'Y.type': 'table1.C1'}, []), - ('FetchStep', [('Any X,Y WHERE X multisource_crossed_rel Y, X is Note, Y is Note', - [{'X': 'Note', 'Y': 'Note'}])], - [self.cards, self.system], None, - {'X': 'table2.C0', 'Y': 'table2.C1'}, - []), - ('OneFetchStep', [('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T, ' - 'X is Note, Y is Note, Y identity A, X identity B, A is Note, B is Note', - [{u'A': 'Note', u'B': 'Note', 'T': 'String', 'X': 'Note', 'Y': 'Note'}])], - None, None, - [self.system], - {'A': 'table1.C0', - 'B': 'table0.C0', - 'T': 'table1.C1', - 'X': 'table2.C0', - 'X.type': 
'table0.C1', - 'Y': 'table2.C1', - 'Y.type': 'table1.C1'}, - []), - ], - {'x': 999999,}) - - def test_crossed_relation_noeid_needattr(self): - # http://www.cubicweb.org/ticket/1382452 - self._test('DISTINCT Any DEP WHERE DEP is Note, P type "cubicweb-foo", P multisource_crossed_rel DEP, DEP type LIKE "cubicweb%"', - [('FetchStep', [(u'Any DEP WHERE DEP type LIKE "cubicweb%", DEP is Note', - [{'DEP': 'Note'}])], - [self.cards, self.system], None, - {'DEP': 'table0.C0'}, - []), - ('FetchStep', [(u'Any P WHERE P type "cubicweb-foo", P is Note', [{'P': 'Note'}])], - [self.cards, self.system], None, {'P': 'table1.C0'}, - []), - ('FetchStep', [('Any DEP,P WHERE P multisource_crossed_rel DEP, DEP is Note, P is Note', - [{'DEP': 'Note', 'P': 'Note'}])], - [self.cards, self.system], None, {'DEP': 'table2.C0', 'P': 'table2.C1'}, - []), - ('OneFetchStep', - [('DISTINCT Any DEP WHERE P multisource_crossed_rel DEP, DEP is Note, ' - 'P is Note, DEP identity A, P identity B, A is Note, B is Note', - [{u'A': 'Note', u'B': 'Note', 'DEP': 'Note', 'P': 'Note'}])], - None, None, [self.system], - {'A': 'table0.C0', 'B': 'table1.C0', 'DEP': 'table2.C0', 'P': 'table2.C1'}, - [])]) - - def test_crossed_relation_noeid_invariant(self): - # see comment in http://www.cubicweb.org/ticket/1382452 - self.schema.add_relation_def( - RelationDefinition(subject='Note', name='multisource_crossed_rel', object='Affaire')) - self.repo.set_schema(self.schema) - try: - self._test('DISTINCT Any P,DEP WHERE P type "cubicweb-foo", P multisource_crossed_rel DEP', - [('FetchStep', - [('Any DEP WHERE DEP is Note', [{'DEP': 'Note'}])], - [self.cards, self.system], None, {'DEP': 'table0.C0'}, []), - ('FetchStep', - [(u'Any P WHERE P type "cubicweb-foo", P is Note', [{'P': 'Note'}])], - [self.cards, self.system], None, {'P': 'table1.C0'}, []), - ('UnionStep', None, None, - [('OneFetchStep', - [('DISTINCT Any P,DEP WHERE P multisource_crossed_rel DEP, DEP is Note, P is Note', - [{'DEP': 'Note', 'P': 'Note'}])], - None, None, [self.cards], None, []), - ('OneFetchStep', - [('DISTINCT Any P,DEP WHERE P multisource_crossed_rel DEP, DEP is Note, P is Note', - [{'DEP': 'Note', 'P': 'Note'}])], - None, None, [self.system], - {'DEP': 'table0.C0', 'P': 'table1.C0'}, - []), - ('OneFetchStep', - [('DISTINCT Any P,DEP WHERE P multisource_crossed_rel DEP, DEP is Affaire, P is Note', - [{'DEP': 'Affaire', 'P': 'Note'}])], - None, None, [self.system], {'P': 'table1.C0'}, - [])]) - ]) - finally: - self.schema.del_relation_def('Note', 'multisource_crossed_rel', 'Affaire') - self.repo.set_schema(self.schema) - - # edition queries tests ################################################### - - def test_insert_simplified_var_1(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'system', None, 'system') - self._test('INSERT Note X: X in_state S, X type T WHERE S eid %(s)s, N eid %(n)s, N type T', - [('InsertStep', - [('InsertRelationsStep', - [('OneFetchStep', [('Any T WHERE N eid 999999, N type T, N is Note', - [{'N': 'Note', 'T': 'String'}])], - None, None, [self.cards], {}, [])]) - ]) - ], - {'n': 999999, 's': 999998}) - - def test_insert_simplified_var_2(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'system', None, 'system') - self._test('INSERT Note X: X in_state S, X type T, X migrated_from N WHERE S eid %(s)s, N eid %(n)s, N type T', - [('InsertStep', - [('InsertRelationsStep', - [('OneFetchStep', [('Any T 
WHERE N eid 999999, N type T, N is Note', - [{'N': 'Note', 'T': 'String'}])], - None, None, [self.cards], {}, []) - ]) - ]) - ], - {'n': 999999, 's': 999998}) - - def test_insert_simplified_var_3(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'cards', 999998, 'cards') - self._test('INSERT Note X: X in_state S, X type T WHERE S eid %(s)s, N eid %(n)s, N type T', - [('InsertStep', - [('InsertRelationsStep', - [('OneFetchStep', [('Any T WHERE N eid 999999, N type T, N is Note', - [{'N': 'Note', 'T': 'String'}])], - None, None, [self.cards], {}, [])] - )] - )], - {'n': 999999, 's': 999998}) - - def test_insert_simplified_var_4(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'system', None, 'system') - self._test('INSERT Note X: X in_state S, X type "bla", X migrated_from N WHERE S eid %(s)s, N eid %(n)s', - [('InsertStep', - [('InsertRelationsStep', - [('OneFetchStep', - [('Any 999999', [{}])], - None, None, - [self.system], {}, - [])])] - )], - {'n': 999999, 's': 999998}) - - def test_insert_simplified_var_5(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'system', None, 'system') - self._test('INSERT Note X: X in_state S, X type "bla", X migrated_from N WHERE S eid %(s)s, N eid %(n)s, A concerne N', - [('InsertStep', - [('InsertRelationsStep', - [('OneFetchStep', - [('Any A WHERE A concerne 999999, A is Affaire', - [{'A': 'Affaire'}])], - None, None, [self.system], {}, []), - ]), - ]) - ], - {'n': 999999, 's': 999998}) - - def test_delete_relation1(self): - ueid = self.session.user.eid - self._test('DELETE X created_by Y WHERE X eid %(x)s, NOT Y eid %(y)s', - [('DeleteRelationsStep', [ - ('OneFetchStep', [('Any %s,Y WHERE %s created_by Y, NOT Y eid %s, Y is CWUser' % (ueid, ueid, ueid), - [{'Y': 'CWUser'}])], - None, None, [self.system], {}, []), - ]), - ], - {'x': ueid, 'y': ueid}) - - def test_delete_relation2(self): - ueid = self.session.user.eid - self._test('DELETE X created_by Y WHERE X eid %(x)s, NOT Y login "syt"', - [('FetchStep', [('Any Y WHERE NOT Y login "syt", Y is CWUser', [{'Y': 'CWUser'}])], - [self.ldap, self.system], None, {'Y': 'table0.C0'}, []), - ('DeleteRelationsStep', [ - ('OneFetchStep', [('Any %s,Y WHERE %s created_by Y, Y is CWUser'%(ueid,ueid), [{'Y': 'CWUser'}])], - None, None, [self.system], {'Y': 'table0.C0'}, []), - ]), - ], - {'x': ueid, 'y': ueid}) - - def test_delete_relation3(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.assertRaises( - BadRQLQuery, self._test, - 'DELETE Y multisource_inlined_rel X WHERE X eid %(x)s, ' - 'NOT (Y cw_source S, S name %(source)s)', [], - {'x': 999999, 'source': 'cards'}) - - def test_delete_relation4(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.assertRaises( - BadRQLQuery, self._test, - 'DELETE X multisource_inlined_rel Y WHERE Y is Note, X eid %(x)s, ' - 'NOT (Y cw_source S, S name %(source)s)', [], - {'x': 999999, 'source': 'cards'}) - - def test_delete_entity1(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('DELETE Note X WHERE X eid %(x)s, NOT Y multisource_rel X', - [('DeleteEntitiesStep', - [('OneFetchStep', [('Any 999999 WHERE NOT EXISTS(Y multisource_rel 999999), Y is IN(Card, Note)', - [{'Y': 'Card'}, {'Y': 'Note'}])], - None, None, [self.system], {}, []) - ]) - ], - 
{'x': 999999}) - - def test_delete_entity2(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('DELETE Note X WHERE X eid %(x)s, NOT X multisource_inlined_rel Y', - [('DeleteEntitiesStep', - [('OneFetchStep', [('Any X WHERE X eid 999999, NOT X multisource_inlined_rel Y, X is Note, Y is IN(Affaire, Note)', - [{'X': 'Note', 'Y': 'Affaire'}, {'X': 'Note', 'Y': 'Note'}])], - None, None, [self.system], {}, []) - ]) - ], - {'x': 999999}) - - def test_update(self): - self._test('SET X copain Y WHERE X login "comme", Y login "cochon"', - [('FetchStep', - [('Any X WHERE X login "comme", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any Y WHERE Y login "cochon", Y is CWUser', [{'Y': 'CWUser'}])], - [self.ldap, self.system], None, {'Y': 'table1.C0'}, []), - ('UpdateStep', - [('OneFetchStep', - [('DISTINCT Any X,Y WHERE X is CWUser, Y is CWUser', - [{'X': 'CWUser', 'Y': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0', 'Y': 'table1.C0'}, []) - ]) - ]) - - def test_update2(self): - self._test('SET U in_group G WHERE G name ~= "bougloup%", U login "admin"', - [('FetchStep', [('Any U WHERE U login "admin", U is CWUser', [{'U': 'CWUser'}])], - [self.ldap, self.system], None, {'U': 'table0.C0'}, []), - ('UpdateStep', [ - ('OneFetchStep', [('DISTINCT Any U,G WHERE G name ILIKE "bougloup%", G is CWGroup, U is CWUser', - [{'U': 'CWUser', 'G': 'CWGroup'}])], - None, None, [self.system], {'U': 'table0.C0'}, []), - ]), - ]) - - def test_update3(self): - anoneid = self.user_groups_session('guests').user.eid - # since we are adding an in_state relation for an entity in the system - # source, states should only be searched in the system source as well - self._test('SET X in_state S WHERE X eid %(x)s, S name "deactivated"', - [('UpdateStep', [ - ('OneFetchStep', [('DISTINCT Any S WHERE S name "deactivated", S is State', - [{'S': 'State'}])], - None, None, [self.system], {}, []), - ]), - ], - {'x': anoneid}) - -# def test_update4(self): -# # since we are adding an in_state relation with a state from the system -# # source, CWUser should be searched only in the system source as well -# rset = self.execute('State X WHERE X name "activated"') -# assert len(rset) == 1, rset -# activatedeid = rset[0][0] -# self._test('SET X in_state S WHERE X is CWUser, S eid %s' % activatedeid, -# [('UpdateStep', [ -# ('OneFetchStep', [('DISTINCT Any X,%s WHERE X is CWUser' % activatedeid, -# [{'X': 'CWUser'}])], -# None, None, [self.system], {}, []), -# ]), -# ]) - - def test_ldap_user_related_to_invariant_and_dont_cross_rel(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.cards.dont_cross_relations.add('created_by') - try: - self._test('Any X,XL WHERE E eid %(x)s, E created_by X, X login XL', - [('FetchStep', [('Any X,XL WHERE X login XL, X is CWUser', - [{'X': 'CWUser', 'XL': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table0.C0', 'X.login': 'table0.C1', 'XL': 'table0.C1'}, - []), - ('OneFetchStep', - [('Any X,XL WHERE 999999 created_by X, X login XL, X is CWUser', - [{'X': 'CWUser', 'XL': 'String'}])], - None, None, - [self.system], - {'X': 'table0.C0', 'X.login': 'table0.C1', 'XL': 'table0.C1'}, - [])], - {'x': 999999}) - finally: - self.cards.dont_cross_relations.remove('created_by') - - def test_ambigous_cross_relation(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.cards.support_relations['see_also'] = True -
self.cards.cross_relations.add('see_also') - try: - self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E see_also X, X modification_date AA', - [('AggrStep', - 'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1', - None, - [('FetchStep', - [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Note', - [{'AA': 'Datetime', 'X': 'Note'}])], [self.cards, self.system], {}, - {'AA': 'table0.C1', 'X': 'table0.C0', - 'X.modification_date': 'table0.C1'}, - []), - ('FetchStep', - [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Bookmark', - [{'AA': 'Datetime', 'X': 'Bookmark'}])], - [self.system], {}, - {'AA': 'table0.C1', 'X': 'table0.C0', - 'X.modification_date': 'table0.C1'}, - [])])], - {'x': 999999}) - finally: - del self.cards.support_relations['see_also'] - self.cards.cross_relations.remove('see_also') - - def test_state_of_cross(self): - self._test('DELETE State X WHERE NOT X state_of Y', - [('DeleteEntitiesStep', - [('OneFetchStep', - [('Any X WHERE NOT X state_of Y, X is State, Y is Workflow', - [{'X': 'State', 'Y': 'Workflow'}])], - None, None, [self.system], {}, [])])] - ) - - - def test_source_specified_0_0(self): - self._test('Card X WHERE X cw_source S, S eid 1', - [('OneFetchStep', [('Any X WHERE X cw_source 1, X is Card', - [{'X': 'Card'}])], - None, None, - [self.system],{}, []) - ]) - - def test_source_specified_0_1(self): - self._test('Any X, S WHERE X is Card, X cw_source S, S eid 1', - [('OneFetchStep', [('Any X,1 WHERE X is Card, X cw_source 1', - [{'X': 'Card'}])], - None, None, - [self.system],{}, []) - ]) - - def test_source_specified_1_0(self): - self._test('Card X WHERE X cw_source S, S name "system"', - [('OneFetchStep', [('Any X WHERE X cw_source S, S name "system", X is Card', - [{'X': 'Card', 'S': 'CWSource'}])], - None, None, - [self.system],{}, []) - ]) - - def test_source_specified_1_1(self): - self._test('Any X, SN WHERE X is Card, X cw_source S, S name "system", S name SN', - [('OneFetchStep', [('Any X,SN WHERE X is Card, X cw_source S, S name "system", ' - 'S name SN', - [{'S': 'CWSource', 'SN': 'String', 'X': 'Card'}])], - None, None, [self.system], {}, []) - ]) - - def test_source_specified_1_2(self): - self._test('Card X WHERE X cw_source S, S name "datafeed"', - [('OneFetchStep', [('Any X WHERE X cw_source S, S name "datafeed", X is Card', - [{'X': 'Card', 'S': 'CWSource'}])], - None, None, - [self.system],{}, []) - ]) - - def test_source_specified_1_3(self): - self._test('Any X, SN WHERE X is Card, X cw_source S, S name "datafeed", S name SN', - [('OneFetchStep', [('Any X,SN WHERE X is Card, X cw_source S, S name "datafeed", ' - 'S name SN', - [{'S': 'CWSource', 'SN': 'String', 'X': 'Card'}])], - None, None, [self.system], {}, []) - ]) - - def test_source_specified_1_4(self): - sols = [] - for sol in X_ALL_SOLS: - sol = sol.copy() - sol['S'] = 'CWSource' - sols.append(sol) - self._test('Any X WHERE X cw_source S, S name "cards"', - [('OneFetchStep', [('Any X WHERE X cw_source S, S name "cards"', - sols)], - None, None, - [self.system],{}, []) - ]) - - def test_source_specified_2_0(self): - # self._test('Card X WHERE X cw_source S, NOT S eid 1', - # [('OneFetchStep', [('Any X WHERE X is Card', - # [{'X': 'Card'}])], - # None, None, - # [self.cards],{}, []) - # ]) - self._test('Card X WHERE NOT X cw_source S, S eid 1', - [('OneFetchStep', [('Any X WHERE X is Card', - [{'X': 'Card'}])], - None, None, - [self.cards],{}, []) - ]) - - def test_source_specified_2_1(self): - self._test('Card X WHERE X cw_source S, NOT S name 
"system"', - [('OneFetchStep', [('Any X WHERE X is Card', - [{'X': 'Card'}])], - None, None, - [self.cards],{}, []) - ]) - self._test('Card X WHERE NOT X cw_source S, S name "system"', - [('OneFetchStep', [('Any X WHERE X is Card', - [{'X': 'Card'}])], - None, None, - [self.cards],{}, []) - ]) - - def test_source_specified_3_1(self): - self._test('Any X,XT WHERE X is Card, X title XT, X cw_source S, S name "cards"', - [('OneFetchStep', - [('Any X,XT WHERE X is Card, X title XT', - [{'X': 'Card', 'XT': 'String'}])], - None, None, [self.cards], {}, []) - ]) - - def test_source_specified_3_2(self): - self._test('Any X,XT WHERE X is Card, X title XT, X cw_source S, S name "datafeed"', - [('OneFetchStep', - [('Any X,XT WHERE X is Card, X title XT, X cw_source S, S name "datafeed"', - [{'X': 'Card', 'XT': 'String', 'S': 'CWSource'}])], - None, None, [self.system], {}, []) - ]) - - def test_source_specified_3_3(self): - self.skipTest('oops') - self._test('Any STN WHERE X is Note, X type XT, X in_state ST, ST name STN, X cw_source S, S name "cards"', - [('OneFetchStep', - [('Any X,XT WHERE X is Card, X title XT', - [{'X': 'Card', 'XT': 'String'}])], - None, None, [self.cards], {}, []) - ]) - - def test_source_conflict_1(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - with self.assertRaises(BadRQLQuery) as cm: - self._test('Any X WHERE X cw_source S, S name "system", X eid %(x)s', - [], {'x': 999999}) - self.assertEqual(str(cm.exception), 'source conflict for term %(x)s') - - def test_source_conflict_2(self): - with self.assertRaises(BadRQLQuery) as cm: - self._test('Card X WHERE X cw_source S, S name "systeme"', []) - self.assertEqual(str(cm.exception), 'source conflict for term X') - - def test_source_conflict_3(self): - self.skipTest('oops') - self._test('CWSource X WHERE X cw_source S, S name "cards"', - [('OneFetchStep', - [(u'Any X WHERE X cw_source S, S name "cards", X is CWSource', - [{'S': 'CWSource', 'X': 'CWSource'}])], - None, None, - [self.system], - {}, [])]) - - - def test_ambigous_cross_relation_source_specified(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.cards.support_relations['see_also'] = True - self.cards.cross_relations.add('see_also') - try: - self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E see_also X, X modification_date AA', - [('AggrStep', - 'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1', - None, - [('FetchStep', - [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Note', - [{'AA': 'Datetime', 'X': 'Note'}])], [self.cards, self.system], {}, - {'AA': 'table0.C1', 'X': 'table0.C0', - 'X.modification_date': 'table0.C1'}, - []), - ('FetchStep', - [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Bookmark', - [{'AA': 'Datetime', 'X': 'Bookmark'}])], - [self.system], {}, - {'AA': 'table0.C1', 'X': 'table0.C0', - 'X.modification_date': 'table0.C1'}, - [])])], - {'x': 999999}) - finally: - del self.cards.support_relations['see_also'] - self.cards.cross_relations.remove('see_also') - - # non regression tests #################################################### - - def test_nonregr1(self): - self._test('Any X, Y WHERE X copain Y, X login "syt", Y login "cochon"', - [('FetchStep', - [('Any X WHERE X login "syt", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any Y WHERE Y login "cochon", Y is CWUser', [{'Y': 'CWUser'}])], - [self.ldap, self.system], None, {'Y': 'table1.C0'}, []), - 
('OneFetchStep', - [('Any X,Y WHERE X copain Y, X is CWUser, Y is CWUser', - [{'X': 'CWUser', 'Y': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0', 'Y': 'table1.C0'}, []) - ]) - - def test_nonregr2(self): - iworkflowable = self.session.user.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate') - treid = iworkflowable.latest_trinfo().eid - self._test('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', - [('FetchStep', [('Any X,D WHERE X modification_date D, X is Note', - [{'X': 'Note', 'D': 'Datetime'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'D': 'table0.C1'}, []), - ('FetchStep', [('Any X,D WHERE X modification_date D, X is CWUser', - [{'X': 'CWUser', 'D': 'Datetime'}])], - [self.ldap, self.system], None, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', 'D': 'table1.C1'}, []), - ('AggrStep', 'SELECT table2.C0 FROM table2\nORDER BY table2.C1 DESC', None, [ - ('FetchStep', [('Any X,D WHERE E eid %s, E wf_info_for X, X modification_date D, E is TrInfo, X is Affaire'%treid, - [{'X': 'Affaire', 'E': 'TrInfo', 'D': 'Datetime'}])], - [self.system], - {}, - {'X': 'table2.C0', 'X.modification_date': 'table2.C1', 'D': 'table2.C1', 'E.wf_info_for': 'table2.C0'}, []), - ('FetchStep', [('Any X,D WHERE E eid %s, E wf_info_for X, X modification_date D, E is TrInfo, X is CWUser'%treid, - [{'X': 'CWUser', 'E': 'TrInfo', 'D': 'Datetime'}])], - [self.system], - {'X': 'table1.C0', 'X.modification_date': 'table1.C1', 'D': 'table1.C1'}, - {'X': 'table2.C0', 'X.modification_date': 'table2.C1', 'D': 'table2.C1', 'E.wf_info_for': 'table2.C0'}, []), - ('FetchStep', [('Any X,D WHERE E eid %s, E wf_info_for X, X modification_date D, E is TrInfo, X is Note'%treid, - [{'X': 'Note', 'E': 'TrInfo', 'D': 'Datetime'}])], - [self.system], - {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'D': 'table0.C1'}, - {'X': 'table2.C0', 'X.modification_date': 'table2.C1', 'D': 'table2.C1', 'E.wf_info_for': 'table2.C0'}, []), - ]), - ], - {'x': treid}) - - def test_nonregr3(self): - # original jpl query: - # Any X, NOW - CD, P WHERE P is Project, U interested_in P, U is CWUser, U login "sthenault", X concerns P, X creation_date CD ORDERBY CD DESC LIMIT 5 - self._test('Any X, NOW - CD, P ORDERBY CD DESC LIMIT 5 WHERE P bookmarked_by U, U login "admin", P is X, X creation_date CD', - [('FetchStep', [('Any U WHERE U login "admin", U is CWUser', [{'U': 'CWUser'}])], - [self.ldap, self.system], None, {'U': 'table0.C0'}, []), - ('OneFetchStep', [('Any X,(NOW - CD),P ORDERBY CD DESC LIMIT 5 WHERE P bookmarked_by U, P is X, X creation_date CD, P is Bookmark, U is CWUser, X is CWEType', - [{'P': 'Bookmark', 'U': 'CWUser', 'X': 'CWEType', 'CD': 'Datetime'}])], - 5, None, [self.system], {'U': 'table0.C0'}, [])] - ) - - def test_nonregr4(self): - ueid = self.session.user.eid - self._test('Any U ORDERBY D DESC WHERE WF wf_info_for X, WF creation_date D, WF from_state FS, ' - 'WF owned_by U?, X eid %(x)s', - [#('FetchStep', [('Any U WHERE U is CWUser', [{'U': 'CWUser'}])], - # [self.ldap, self.system], None, {'U': 'table0.C0'}, []), - ('OneFetchStep', [('Any U ORDERBY D DESC WHERE WF wf_info_for %s, WF creation_date D, WF from_state FS, WF owned_by U?' 
% ueid, - [{'WF': 'TrInfo', 'FS': 'State', 'U': 'CWUser', 'D': 'Datetime'}])], - None, None, - [self.system], {}, [])], - {'x': ueid}) - - def test_nonregr5(self): - # original jpl query: - # DISTINCT Version V WHERE MB done_in MV, MV eid %(x)s, - # MB depends_on B, B done_in V, V version_of P, NOT P eid %(p)s' - cardeid = self.execute('INSERT Card X: X title "hop"')[0][0] - noteeid = self.execute('INSERT Note X')[0][0] - self._test('DISTINCT Card V WHERE MB documented_by MV, MV eid %(x)s, ' - 'MB depends_on B, B documented_by V, V multisource_rel P, NOT P eid %(p)s', - [('FetchStep', [('Any V WHERE V multisource_rel P, NOT P eid %s, P is Note, V is Card'%noteeid, - [{'P': 'Note', 'V': 'Card'}])], - [self.cards, self.system], None, {'V': 'table0.C0'}, []), - ('OneFetchStep', [('DISTINCT Any V WHERE MB documented_by %s, MB depends_on B, B documented_by V, B is Affaire, MB is Affaire, V is Card'%cardeid, - [{'B': 'Affaire', 'MB': 'Affaire', 'V': 'Card'}])], - None, None, [self.system], {'V': 'table0.C0'}, [])], - {'x': cardeid, 'p': noteeid}) - - def test_nonregr6(self): - self._test('Any X WHERE X concerne Y', - [('OneFetchStep', [('Any X WHERE X concerne Y', - [{'Y': 'Division', 'X': 'Affaire'}, - {'Y': 'Note', 'X': 'Affaire'}, - {'Y': 'Societe', 'X': 'Affaire'}, - {'Y': 'SubDivision', 'X': 'Affaire'}, - {'Y': 'Affaire', 'X': 'Personne'}])], - None, None, [self.system], {}, []) - ]) - self._test('Any X WHERE X concerne Y, Y is Note', - [('FetchStep', [('Any Y WHERE Y is Note', [{'Y': 'Note'}])], - [self.cards, self.system], None, {'Y': 'table0.C0'}, []), - ('OneFetchStep', [('Any X WHERE X concerne Y, X is Affaire, Y is Note', - [{'X': 'Affaire', 'Y': 'Note'}])], - None, None, [self.system], {'Y': 'table0.C0'}, []) - ]) - - def test_nonregr7(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any S,SUM(DUR),SUM(I),(SUM(I) - SUM(DUR)),MIN(DI),MAX(DI) GROUPBY S ORDERBY S WHERE A is Affaire, A duration DUR, A invoiced I, A modification_date DI, A in_state S, S name SN, (EXISTS(A concerne WP, W multisource_rel WP)) OR (EXISTS(A concerne W)), W eid %(n)s', - [('FetchStep', [('Any WP WHERE 999999 multisource_rel WP, WP is Note', [{'WP': 'Note'}])], - [self.cards], None, {'WP': u'table0.C0'}, []), - ('OneFetchStep', [('Any S,SUM(DUR),SUM(I),(SUM(I) - SUM(DUR)),MIN(DI),MAX(DI) GROUPBY S ORDERBY S WHERE A duration DUR, A invoiced I, A modification_date DI, A in_state S, S name SN, (EXISTS(A concerne WP, WP is Note)) OR (EXISTS(A concerne 999999)), A is Affaire, S is State', - [{'A': 'Affaire', 'DI': 'Datetime', 'DUR': 'Int', 'I': 'Float', 'S': 'State', 'SN': 'String', 'WP': 'Note'}])], - None, None, [self.system], {'WP': u'table0.C0'}, [])], - {'n': 999999}) - - def test_nonregr8(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X,Z WHERE X eid %(x)s, X multisource_rel Y, Z concerne X', - [('FetchStep', [('Any 999999 WHERE 999999 multisource_rel Y, Y is Note', - [{'Y': 'Note'}])], - [self.cards], - None, {u'%(x)s': 'table0.C0'}, - []), - ('OneFetchStep', [('Any 999999,Z WHERE Z concerne 999999, Z is Affaire', - [{'Z': 'Affaire'}])], - None, None, [self.system], - {u'%(x)s': 'table0.C0'}, []), - ], - {'x': 999999}) - - def test_nonregr9(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('Note', 'cards', 999998, 'cards') - self._test('SET X migrated_from Y WHERE X eid %(x)s, Y multisource_rel Z, Z eid %(z)s, Y migrated_from Z', - [('FetchStep', [('Any Y 
WHERE Y multisource_rel 999998, Y is Note', [{'Y': 'Note'}])], - [self.cards], None, {'Y': u'table0.C0'}, []), - ('UpdateStep', - [('OneFetchStep', [('DISTINCT Any Y WHERE Y migrated_from 999998, Y is Note', - [{'Y': 'Note'}])], - None, None, [self.system], - {'Y': u'table0.C0'}, [])])], - {'x': 999999, 'z': 999998}) - - def test_nonregr10(self): - repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap') - self._test('Any X,AA,AB ORDERBY AA WHERE E eid %(x)s, E owned_by X, X login AA, X modification_date AB', - [('FetchStep', - [('Any X,AA,AB WHERE X login AA, X modification_date AB, X is CWUser', - [{'AA': 'String', 'AB': 'Datetime', 'X': 'CWUser'}])], - [self.ldap, self.system], None, {'AA': 'table0.C1', 'AB': 'table0.C2', - 'X': 'table0.C0', 'X.login': 'table0.C1', 'X.modification_date': 'table0.C2'}, - []), - ('OneFetchStep', - [('Any X,AA,AB ORDERBY AA WHERE 999999 owned_by X, X login AA, X modification_date AB, X is CWUser', - [{'AA': 'String', 'AB': 'Datetime', 'X': 'CWUser'}])], - None, None, [self.system], {'AA': 'table0.C1', 'AB': 'table0.C2', - 'X': 'table0.C0', 'X.login': 'table0.C1', 'X.modification_date': 'table0.C2'}, - []) - ], - {'x': 999999}) - - def test_nonregr11(self): - repo._type_source_cache[999999] = ('Bookmark', 'system', 999999, 'system') - self._test('SET X bookmarked_by Y WHERE X eid %(x)s, Y login "hop"', - [('UpdateStep', - [('OneFetchStep', [('DISTINCT Any Y WHERE Y login "hop", Y is CWUser', [{'Y': 'CWUser'}])], - None, None, [self.ldap, self.system], {}, [])] - )], - {'x': 999999}) - - def test_nonregr12(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X ORDERBY Z DESC WHERE X modification_date Z, E eid %(x)s, E see_also X', - [('FetchStep', [('Any X,Z WHERE X modification_date Z, X is Note', - [{'X': 'Note', 'Z': 'Datetime'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'Z': 'table0.C1'}, - []), - ('AggrStep', 'SELECT table1.C0 FROM table1\nORDER BY table1.C1 DESC', None, - [('FetchStep', [('Any X,Z WHERE X modification_date Z, 999999 see_also X, X is Bookmark', - [{'X': 'Bookmark', 'Z': 'Datetime'}])], - [self.system], {}, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', - 'Z': 'table1.C1'}, - []), - ('FetchStep', [('Any X,Z WHERE X modification_date Z, 999999 see_also X, X is Note', - [{'X': 'Note', 'Z': 'Datetime'}])], - [self.system], {'X': 'table0.C0', 'X.modification_date': 'table0.C1', - 'Z': 'table0.C1'}, - {'X': 'table1.C0', 'X.modification_date': 'table1.C1', - 'Z': 'table1.C1'}, - [])] - )], - {'x': 999999}) - - def test_nonregr13_1(self): - ueid = self.session.user.eid - # identity wrapped into exists: - # shouldn't propagate the constraint that U is in the same source as ME - self._test('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File ' - 'WITH U,UL BEING (Any U,UL WHERE ME eid %(x)s, (EXISTS(U identity ME) ' - 'OR (EXISTS(U in_group G, G name IN("managers", "staff")))) ' - 'OR (EXISTS(U in_group H, ME in_group H, NOT H name "users")), U login UL, U is CWUser)', - [('FetchStep', [('Any U,UL WHERE U login UL, U is CWUser', - [{'U': 'CWUser', 'UL': 'String'}])], - [self.ldap, self.system], None, - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - []), - ('FetchStep', [('Any U,UL WHERE ((EXISTS(U identity %s)) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, %s in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser' % (ueid, ueid), - [{'G':
'CWGroup', 'H': 'CWGroup', 'U': 'CWUser', 'UL': 'String'}])], - [self.system], - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - {'U': 'table1.C0', 'U.login': 'table1.C1', 'UL': 'table1.C1'}, - []), - ('OneFetchStep', [('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File', - [{'B': 'File', 'U': 'CWUser', 'UL': 'String'}])], - None, None, [self.system], - {'U': 'table1.C0', 'UL': 'table1.C1'}, - [])], - {'x': ueid}) - - def test_nonregr13_2(self): - # identity *not* wrapped into exists. - # - # XXX this test fails since in this case, in "U identity 5" U and 5 are - # in the same scope, so constraints are applied (telling that U should - # come from the same source as the user with eid 5). - # - # IMO this is normal, unless we introduce a special case for the - # identity relation. BUT I think it's better to leave it as is and to - # explain the constraint propagation rules, and hence why this should be - # wrapped in exists() when used in a multi-source query - self.skipTest('take a look at me if you wish') - ueid = self.session.user.eid - self._test('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File ' - 'WITH U,UL BEING (Any U,UL WHERE ME eid %(x)s, (U identity ME ' - 'OR (EXISTS(U in_group G, G name IN("managers", "staff")))) ' - 'OR (EXISTS(U in_group H, ME in_group H, NOT H name "users")), U login UL, U is CWUser)', - [('FetchStep', [('Any U,UL WHERE U login UL, U is CWUser', - [{'U': 'CWUser', 'UL': 'String'}])], - [self.ldap, self.system], None, - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - []), - ('FetchStep', [('Any U,UL WHERE ((U identity %s) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, %s in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser' % (ueid, ueid), - [{'G': 'CWGroup', 'H': 'CWGroup', 'U': 'CWUser', 'UL': 'String'}])], - [self.system], - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - {'U': 'table1.C0', 'U.login': 'table1.C1', 'UL': 'table1.C1'}, - []), - ('OneFetchStep', [('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File', - [{'B': 'File', 'U': 'CWUser', 'UL': 'String'}])], - None, None, [self.system], - {'U': 'table1.C0', 'UL': 'table1.C1'}, - [])], - {'x': self.session.user.eid}) - - def test_nonregr14_1(self): - repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap') - self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s', - [('OneFetchStep', [('Any 999999 WHERE 999999 owned_by 999999', [{}])], - None, None, [self.system], {}, [])], - {'x': 999999, 'u': 999999}) - - def test_nonregr14_2(self): - repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap') - repo._type_source_cache[999998] = ('Note', 'system', 999998, 'system') - self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s', - [('OneFetchStep', [('Any 999998 WHERE 999998 owned_by 999999', [{}])], - None, None, [self.system], {}, [])], - {'x': 999998, 'u': 999999}) - - def test_nonregr14_3(self): - repo._type_source_cache[999999] = ('CWUser', 'system', 999999, 'system') - repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998, 'ldap') - self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s', - [('OneFetchStep', [('Any 999998 WHERE 999998 owned_by 999999', [{}])], - None, None, [self.system], {}, [])], - {'x': 999998, 'u': 999999}) - - def test_nonregr_identity_no_source_access_1(self): - repo._type_source_cache[999999] = ('CWUser', 'ldap', 999998, 'ldap') - self._test('Any S WHERE S identity U, S eid %(s)s, U eid %(u)s', -
[('OneFetchStep', [('Any 999999 WHERE 999999 identity 999999', [{}])], - None, None, [self.system], {}, [])], - {'s': 999999, 'u': 999999}) - - def test_nonregr_identity_no_source_access_2(self): - repo._type_source_cache[999999] = ('EmailAddress', 'system', 999999, 'system') - repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998, 'ldap') - self._test('Any X WHERE O use_email X, ((EXISTS(O identity U)) OR (EXISTS(O in_group G, G name IN("managers", "staff")))) OR (EXISTS(O in_group G2, U in_group G2, NOT G2 name "users")), X eid %(x)s, U eid %(u)s', - [('OneFetchStep', [('Any 999999 WHERE O use_email 999999, ((EXISTS(O identity 999998)) OR (EXISTS(O in_group G, G name IN("managers", "staff")))) OR (EXISTS(O in_group G2, 999998 in_group G2, NOT G2 name "users"))', - [{'G': 'CWGroup', 'G2': 'CWGroup', 'O': 'CWUser'}])], - None, None, [self.system], {}, [])], - {'x': 999999, 'u': 999998}) - - def test_nonregr_similar_subquery(self): - repo._type_source_cache[999999] = ('Personne', 'system', 999999, 'system') - self._test('Any T,TD,U,T,UL WITH T,TD,U,UL BEING (' - '(Any T,TD,U,UL WHERE X eid %(x)s, T comments X, T content TD, T created_by U?, U login UL)' - ' UNION ' - '(Any T,TD,U,UL WHERE X eid %(x)s, X connait P, T comments P, T content TD, T created_by U?, U login UL))', - # XXX optimization: use a OneFetchStep with a UNION of both queries - [('FetchStep', [('Any U,UL WHERE U login UL, U is CWUser', - [{'U': 'CWUser', 'UL': 'String'}])], - [self.ldap, self.system], None, - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - []), - ('UnionFetchStep', - [('FetchStep', - [('Any T,TD,U,UL WHERE T comments 999999, T content TD, T created_by U?, U login UL, T is Comment, U is CWUser', - [{'T': 'Comment', 'TD': 'String', 'U': 'CWUser', 'UL': 'String'}])], - [self.system], - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - {'T': 'table1.C0', - 'T.content': 'table1.C1', - 'TD': 'table1.C1', - 'U': 'table1.C2', - 'U.login': 'table1.C3', - 'UL': 'table1.C3'}, - []), - ('FetchStep', - [('Any T,TD,U,UL WHERE 999999 connait P, T comments P, T content TD, T created_by U?, U login UL, P is Personne, T is Comment, U is CWUser', - [{'P': 'Personne', - 'T': 'Comment', - 'TD': 'String', - 'U': 'CWUser', - 'UL': 'String'}])], - [self.system], - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - {'T': 'table1.C0', - 'T.content': 'table1.C1', - 'TD': 'table1.C1', - 'U': 'table1.C2', - 'U.login': 'table1.C3', - 'UL': 'table1.C3'}, - [])]), - ('OneFetchStep', - [('Any T,TD,U,T,UL', - [{'T': 'Comment', 'TD': 'String', 'U': 'CWUser', 'UL': 'String'}])], - None, None, - [self.system], - {'T': 'table1.C0', 'TD': 'table1.C1', 'U': 'table1.C2', 'UL': 'table1.C3'}, - [])], - {'x': 999999}) - - def test_nonregr_dont_readd_already_processed_relation(self): - self._test('Any WO,D,SO WHERE WO is Note, D tags WO, WO in_state SO', - [('FetchStep', - [('Any WO,SO WHERE WO in_state SO, SO is State, WO is Note', - [{'SO': 'State', 'WO': 'Note'}])], - [self.cards, self.system], None, - {'SO': 'table0.C1', 'WO': 'table0.C0'}, - []), - ('OneFetchStep', - [('Any WO,D,SO WHERE D tags WO, D is Tag, SO is State, WO is Note', - [{'D': 'Tag', 'SO': 'State', 'WO': 'Note'}])], - None, None, [self.system], - {'SO': 'table0.C1', 'WO': 'table0.C0'}, - []) - ]) - -class MSPlannerTwoSameExternalSourcesTC(BasePlannerTC): - """test planner related feature on a 3-sources repository: - - * 2 rql sources supporting Card - """ - - def setUp(self): - self.__class__.repo = repo - self.setup() - 
self.add_source(FakeCardSource, 'cards') - self.add_source(FakeCardSource, 'cards2') - self.planner = MSPlanner(self.o.schema, self.repo.vreg.rqlhelper) - assert repo.sources_by_uri['cards2'].support_relation('multisource_crossed_rel') - assert 'multisource_crossed_rel' in repo.sources_by_uri['cards2'].cross_relations - assert repo.sources_by_uri['cards'].support_relation('multisource_crossed_rel') - assert 'multisource_crossed_rel' in repo.sources_by_uri['cards'].cross_relations - _test = test_plan - - - def test_linked_external_entities(self): - repo._type_source_cache[999999] = ('Tag', 'system', 999999, 'system') - self._test('Any X,XT WHERE X is Card, X title XT, T tags X, T eid %(t)s', - [('FetchStep', - [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.cards, self.cards2, self.system], - None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, - []), - ('OneFetchStep', - [('Any X,XT WHERE X title XT, 999999 tags X, X is Card', - [{'X': 'Card', 'XT': 'String'}])], - None, None, [self.system], - {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, - [])], - {'t': 999999}) - - def test_version_depends_on(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X,AD,AE WHERE E eid %(x)s, E migrated_from X, X in_state AD, AD name AE', - [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.cards, self.cards2, self.system], - None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', - 'AE': 'table0.C2', 'X': 'table0.C0'}, - []), - ('OneFetchStep', [('Any X,AD,AE WHERE 999999 migrated_from X, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - None, None, [self.system], - {'AD': 'table0.C1', 'AD.name': 'table0.C2', 'AE': 'table0.C2', 'X': 'table0.C0'}, - [])], - {'x': 999999}) - - def test_version_crossed_depends_on_1(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', - [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.cards, self.cards2, self.system], - None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', - 'AE': 'table0.C2', 'X': 'table0.C0'}, - []), - ('UnionStep', None, None, - [('OneFetchStep', [('Any X,AD,AE WHERE 999999 multisource_crossed_rel X, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - None, None, [self.cards], None, - []), - ('OneFetchStep', [('Any X,AD,AE WHERE 999999 multisource_crossed_rel X, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - None, None, [self.system], - {'AD': 'table0.C1', 'AD.name': 'table0.C2', - 'AE': 'table0.C2', 'X': 'table0.C0'}, - [])] - )], - {'x': 999999}) - - def test_version_crossed_depends_on_2(self): - self.repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', - [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.cards, self.cards2, self.system], - None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', - 'AE': 'table0.C2', 'X': 'table0.C0'}, - []), - ('OneFetchStep', [('Any X,AD,AE WHERE 999999 
multisource_crossed_rel X, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - None, None, [self.system], - {'AD': 'table0.C1', 'AD.name': 'table0.C2', 'AE': 'table0.C2', 'X': 'table0.C0'}, - [])], - {'x': 999999}) - - def test_version_crossed_depends_on_3(self): - self._test('Any X,AD,AE WHERE E multisource_crossed_rel X, X in_state AD, AD name AE, E is Note', - [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.cards, self.cards2, self.system], - None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', - 'AE': 'table0.C2', 'X': 'table0.C0'}, - []), - ('FetchStep', [('Any E WHERE E is Note', [{'E': 'Note'}])], - [self.cards, self.cards2, self.system], - None, {'E': 'table1.C0'}, - []), - ('UnionStep', None, None, - [('OneFetchStep', [('Any X,AD,AE WHERE E multisource_crossed_rel X, AD name AE, AD is State, E is Note, X is Note', - [{'AD': 'State', 'AE': 'String', 'E': 'Note', 'X': 'Note'}])], - None, None, [self.cards, self.cards2], None, - []), - ('OneFetchStep', [('Any X,AD,AE WHERE E multisource_crossed_rel X, AD name AE, AD is State, E is Note, X is Note', - [{'AD': 'State', 'AE': 'String', 'E': 'Note', 'X': 'Note'}])], - None, None, [self.system], - {'AD': 'table0.C1', - 'AD.name': 'table0.C2', - 'AE': 'table0.C2', - 'E': 'table1.C0', - 'X': 'table0.C0'}, - [])] - )] - ) - - def test_version_crossed_depends_on_4(self): - self._test('Any X,AD,AE WHERE EXISTS(E multisource_crossed_rel X), X in_state AD, AD name AE, E is Note', - [('FetchStep', - [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', - [{'X': 'Note', 'AD': 'State', 'AE': 'String'}])], - [self.cards, self.cards2, self.system], None, - {'X': 'table0.C0', - 'AD': 'table0.C1', - 'AD.name': 'table0.C2', - 'AE': 'table0.C2'}, - []), - ('FetchStep', - [('Any A WHERE E multisource_crossed_rel A, A is Note, E is Note', - [{'A': 'Note', 'E': 'Note'}])], - [self.cards, self.cards2, self.system], None, - {'A': 'table1.C0'}, - []), - ('OneFetchStep', - [('Any X,AD,AE WHERE EXISTS(X identity A), AD name AE, A is Note, AD is State, X is Note', - [{'A': 'Note', 'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - None, None, - [self.system], - {'A': 'table1.C0', - 'AD': 'table0.C1', - 'AD.name': 'table0.C2', - 'AE': 'table0.C2', - 'X': 'table0.C0'}, - [] - )] - ) - - def test_nonregr_dont_cross_rel_source_filtering_1(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any S WHERE E eid %(x)s, E in_state S, NOT S name "moved"', - [('OneFetchStep', [('Any S WHERE 999999 in_state S, NOT S name "moved", S is State', - [{'S': 'State'}])], - None, None, [self.cards], {}, [] - )], - {'x': 999999}) - - def test_nonregr_dont_cross_rel_source_filtering_2(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB', - [('OneFetchStep', [('Any X,AA,AB WHERE 999999 in_state X, X name AA, X modification_date AB, X is State', - [{'AA': 'String', 'AB': 'Datetime', 'X': 'State'}])], - None, None, [self.cards], {}, [] - )], - {'x': 999999}) - - def test_nonregr_eid_query(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X WHERE X eid 999999', - [('OneFetchStep', [('Any 999999', [{}])], - None, None, [self.system], {}, [] - )], - {'x': 999999}) - - - def test_nonregr_not_is(self): - self._test("Any X 
WHERE X owned_by U, U login 'anon', NOT X is Comment", - [('FetchStep', [('Any X WHERE X is IN(Card, Note, State)', - [{'X': 'Note'}, {'X': 'State'}, {'X': 'Card'}])], - [self.cards, self.cards2, self.system], - None, {'X': 'table0.C0'}, []), - ('UnionStep', None, None, - [('OneFetchStep', - [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Affaire, BaseTransition, Basket, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWDataImport, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWSourceHostConfig, CWSourceSchemaConfig, CWUniqueTogetherConstraint, CWUser, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Frozable, Old, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', - [{'U': 'CWUser', 'X': 'Affaire'}, - {'U': 'CWUser', 'X': 'BaseTransition'}, - {'U': 'CWUser', 'X': 'Basket'}, - {'U': 'CWUser', 'X': 'Bookmark'}, - {'U': 'CWUser', 'X': 'CWAttribute'}, - {'U': 'CWUser', 'X': 'CWCache'}, - {'U': 'CWUser', 'X': 'CWConstraint'}, - {'U': 'CWUser', 'X': 'CWConstraintType'}, - {'U': 'CWUser', 'X': 'CWDataImport'}, - {'U': 'CWUser', 'X': 'CWEType'}, - {'U': 'CWUser', 'X': 'CWGroup'}, - {'U': 'CWUser', 'X': 'CWPermission'}, - {'U': 'CWUser', 'X': 'CWProperty'}, - {'U': 'CWUser', 'X': 'CWRType'}, - {'U': 'CWUser', 'X': 'CWRelation'}, - {'U': 'CWUser', 'X': 'CWSource'}, - {'U': 'CWUser', 'X': 'CWSourceHostConfig'}, - {'U': 'CWUser', 'X': 'CWSourceSchemaConfig'}, - {'U': 'CWUser', 'X': 'CWUniqueTogetherConstraint'}, - {'U': 'CWUser', 'X': 'CWUser'}, - {'U': 'CWUser', 'X': 'Division'}, - {'U': 'CWUser', 'X': 'Email'}, - {'U': 'CWUser', 'X': 'EmailAddress'}, - {'U': 'CWUser', 'X': 'EmailPart'}, - {'U': 'CWUser', 'X': 'EmailThread'}, - {'U': 'CWUser', 'X': 'ExternalUri'}, - {'U': 'CWUser', 'X': 'File'}, - {'U': 'CWUser', 'X': 'Folder'}, - {'U': 'CWUser', 'X': 'Frozable'}, - {'U': 'CWUser', 'X': 'Old'}, - {'U': 'CWUser', 'X': 'Personne'}, - {'U': 'CWUser', 'X': 'RQLExpression'}, - {'U': 'CWUser', 'X': 'Societe'}, - {'U': 'CWUser', 'X': 'SubDivision'}, - {'U': 'CWUser', 'X': 'SubWorkflowExitPoint'}, - {'U': 'CWUser', 'X': 'Tag'}, - {'U': 'CWUser', 'X': 'TrInfo'}, - {'U': 'CWUser', 'X': 'Transition'}, - {'U': 'CWUser', 'X': 'Workflow'}, - {'U': 'CWUser', 'X': 'WorkflowTransition'}])], - None, None, - [self.system], {}, []), - ('OneFetchStep', - [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Card, Note, State)', - [{'U': 'CWUser', 'X': 'Note'}, - {'U': 'CWUser', 'X': 'State'}, - {'U': 'CWUser', 'X': 'Card'}])], - None, None, - [self.system], {'X': 'table0.C0'}, []) - ]) - ]) - - def test_remove_from_deleted_source_1(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Note X WHERE X eid 999999, NOT X cw_source Y', - [('OneFetchStep', - [('Any 999999 WHERE NOT EXISTS(999999 cw_source Y)', - [{'Y': 'CWSource'}])], - None, None, [self.system], {}, []) - ]) - - def test_remove_from_deleted_source_2(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.repo._type_source_cache[999998] = ('Note', 'cards', 999998, 'cards') - self._test('Note X WHERE X eid IN (999998, 999999), NOT X cw_source Y', - [('FetchStep', - [('Any X WHERE X eid IN(999998, 999999), X is Note', - [{'X': 'Note'}])], - [self.cards], None, {'X': 'table0.C0'}, []), - ('OneFetchStep', - [('Any X WHERE NOT EXISTS(X cw_source Y, Y is CWSource), X is Note', - [{'X': 'Note', 'Y': 
'CWSource'}])], - None, None, [self.system],{'X': 'table0.C0'}, []) - ]) - - -class FakeVCSSource(AbstractSource): - uri = 'ccc' - support_entities = {'Card': True, 'Note': True} - support_relations = {'multisource_inlined_rel': True, - 'multisource_rel': True} - - def syntax_tree_search(self, *args, **kwargs): - return [] - -class MSPlannerVCSSource(BasePlannerTC): - - def setUp(self): - self.__class__.repo = repo - self.setup() - self.add_source(FakeVCSSource, 'vcs') - self.planner = MSPlanner(self.o.schema, self.repo.vreg.rqlhelper) - _test = test_plan - - def test_multisource_inlined_rel_skipped(self): - self._test('Any MAX(VC) ' - 'WHERE VC multisource_inlined_rel R2, R para %(branch)s, VC in_state S, S name "published", ' - '(EXISTS(R identity R2)) OR (EXISTS(R multisource_rel R2))', - [('FetchStep', [('Any VC WHERE VC multisource_inlined_rel R2, R para "???", (EXISTS(R identity R2)) OR (EXISTS(R multisource_rel R2)), R is Note, R2 is Note, VC is Note', - [{'R': 'Note', 'R2': 'Note', 'VC': 'Note'}])], - [self.vcs, self.system], None, - {'VC': 'table0.C0'}, - []), - ('OneFetchStep', [(u'Any MAX(VC) WHERE VC in_state S, S name "published", S is State, VC is Note', - [{'S': 'State', 'VC': 'Note'}])], - None, None, [self.system], - {'VC': 'table0.C0'}, - []) - ]) - - def test_fully_simplified_extsource(self): - self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998, 'vcs') - self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999, 'vcs') - self._test('Any X, Y WHERE NOT X multisource_rel Y, X eid 999998, Y eid 999999', - [('OneFetchStep', [('Any 999998,999999 WHERE NOT EXISTS(999998 multisource_rel 999999)', [{}])], - None, None, [self.vcs], {}, []) - ]) - - def test_nonregr_fully_simplified_extsource(self): - self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998, 'vcs') - self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999, 'vcs') - self.repo._type_source_cache[1000000] = ('Note', 'system', 1000000, 'system') - self._test('DISTINCT Any T,FALSE,L,M WHERE L eid 1000000, M eid 999999, T eid 999998', - [('OneFetchStep', [('DISTINCT Any 999998,FALSE,1000000,999999', [{}])], - None, None, [self.system], {}, []) - ]) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r 84738d495ffd -r 793377697c81 server/test/unittest_multisources.py --- a/server/test/unittest_multisources.py Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,394 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
- -from datetime import datetime, timedelta -from itertools import repeat - -from cubicweb.devtools import TestServerConfiguration, init_test_database -from cubicweb.devtools.testlib import CubicWebTC, Tags -from cubicweb.devtools.repotest import do_monkey_patch, undo_monkey_patch -from cubicweb.devtools import get_test_db_handler - -class ExternalSource1Configuration(TestServerConfiguration): - sourcefile = 'sources_extern' - -class ExternalSource2Configuration(TestServerConfiguration): - sourcefile = 'sources_multi' - -MTIME = datetime.utcnow() - timedelta(0, 10) - -EXTERN_SOURCE_CFG = u''' -cubicweb-user = admin -cubicweb-password = gingkow -base-url=http://extern.org/ -''' - -# hi-jacking -from cubicweb.server.sources.pyrorql import PyroRQLSource -from cubicweb.dbapi import Connection - -PyroRQLSource_get_connection = PyroRQLSource.get_connection -Connection_close = Connection.close - -def add_extern_mapping(source): - source.init_mapping(zip(('Card', 'Affaire', 'State', - 'in_state', 'documented_by', 'multisource_inlined_rel'), - repeat(u'write'))) - - -def pre_setup_database_extern(session, config): - session.execute('INSERT Card X: X title "C3: An external card", X wikiid "aaa"') - session.execute('INSERT Card X: X title "C4: Ze external card", X wikiid "zzz"') - session.execute('INSERT Affaire X: X ref "AFFREF"') - session.commit() - -def pre_setup_database_multi(session, config): - session.create_entity('CWSource', name=u'extern', type=u'pyrorql', - url=u'pyro:///extern', config=EXTERN_SOURCE_CFG) - session.commit() - - -class TwoSourcesTC(CubicWebTC): - """Main repo -> extern-multi -> extern - \-------------/ - """ - test_db_id= 'cw-server-multisources' - tags = CubicWebTC.tags | Tags(('multisources')) - - @classmethod - def setUpClass(cls): - cls._cfg2 = ExternalSource1Configuration('data', apphome=TwoSourcesTC.datadir) - cls._cfg3 = ExternalSource2Configuration('data', apphome=TwoSourcesTC.datadir) - TestServerConfiguration.no_sqlite_wrap = True - # hi-jack PyroRQLSource.get_connection to access existing connection (no - # pyro connection) - PyroRQLSource.get_connection = lambda x: x.uri == 'extern-multi' and cls.cnx3 or cls.cnx2 - # also necessary since the repository is closing its initial connections - # pool though we want to keep cnx2 valid - Connection.close = lambda x: None - - @classmethod - def tearDowncls(cls): - PyroRQLSource.get_connection = PyroRQLSource_get_connection - Connection.close = Connection_close - cls.cnx2.close() - cls.cnx3.close() - TestServerConfiguration.no_sqlite_wrap = False - - @classmethod - def _init_repo(cls): - repo2_handler = get_test_db_handler(cls._cfg2) - repo2_handler.build_db_cache('4cards-1affaire',pre_setup_func=pre_setup_database_extern) - cls.repo2, cls.cnx2 = repo2_handler.get_repo_and_cnx('4cards-1affaire') - - repo3_handler = get_test_db_handler(cls._cfg3) - repo3_handler.build_db_cache('multisource',pre_setup_func=pre_setup_database_multi) - cls.repo3, cls.cnx3 = repo3_handler.get_repo_and_cnx('multisource') - - - super(TwoSourcesTC, cls)._init_repo() - - def setUp(self): - CubicWebTC.setUp(self) - self.addCleanup(self.cnx2.close) - self.addCleanup(self.cnx3.close) - do_monkey_patch() - - def tearDown(self): - for source in self.repo.sources[1:]: - self.repo.remove_source(source.uri) - CubicWebTC.tearDown(self) - self.cnx2.close() - self.cnx3.close() - undo_monkey_patch() - - @staticmethod - def pre_setup_database(session, config): - for uri, src_config in [('extern', EXTERN_SOURCE_CFG), - ('extern-multi', ''' -cubicweb-user = 
admin -cubicweb-password = gingkow -''')]: - source = session.create_entity('CWSource', name=unicode(uri), - type=u'pyrorql', url=u'pyro:///extern-multi', - config=unicode(src_config)) - session.commit() - add_extern_mapping(source) - - session.commit() - # trigger discovery - session.execute('Card X') - session.execute('Affaire X') - session.execute('State X') - - def setup_database(self): - cu2 = self.cnx2.cursor() - self.ec1 = cu2.execute('Any X WHERE X is Card, X title "C3: An external card", X wikiid "aaa"')[0][0] - self.aff1 = cu2.execute('Any X WHERE X is Affaire, X ref "AFFREF"')[0][0] - cu2.close() - # add some entities - self.ic1 = self.sexecute('INSERT Card X: X title "C1: An internal card", X wikiid "aaai"')[0][0] - self.ic2 = self.sexecute('INSERT Card X: X title "C2: Ze internal card", X wikiid "zzzi"')[0][0] - - def test_eid_comp(self): - rset = self.sexecute('Card X WHERE X eid > 1') - self.assertEqual(len(rset), 4) - rset = self.sexecute('Any X,T WHERE X title T, X eid > 1') - self.assertEqual(len(rset), 4) - - def test_metainformation(self): - rset = self.sexecute('Card X ORDERBY T WHERE X title T') - # 2 added to the system source, 2 added to the external source - self.assertEqual(len(rset), 4) - # since they are ordered by eid, we know the first ones come from the system source - # and the others from the external source - self.assertEqual(rset.get_entity(0, 0).cw_metainformation(), - {'source': {'type': 'native', 'uri': 'system', 'use-cwuri-as-url': False}, - 'type': u'Card', 'extid': None}) - externent = rset.get_entity(3, 0) - metainf = externent.cw_metainformation() - self.assertEqual(metainf['source'], {'type': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern', 'use-cwuri-as-url': False}) - self.assertEqual(metainf['type'], 'Card') - self.assert_(metainf['extid']) - etype = self.sexecute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s', - {'x': externent.eid})[0][0] - self.assertEqual(etype, 'Card') - - def test_order_limit_offset(self): - rsetbase = self.sexecute('Any W,X ORDERBY W,X WHERE X wikiid W') - self.assertEqual(len(rsetbase), 4) - self.assertEqual(sorted(rsetbase.rows), rsetbase.rows) - rset = self.sexecute('Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WHERE X wikiid W') - self.assertEqual(rset.rows, rsetbase.rows[2:4]) - - def test_has_text(self): - self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before - self.assertTrue(self.sexecute('Any X WHERE X has_text "affref"')) - self.assertTrue(self.sexecute('Affaire X WHERE X has_text "affref"')) - self.assertTrue(self.sexecute('Any X ORDERBY FTIRANK(X) WHERE X has_text "affref"')) - self.assertTrue(self.sexecute('Affaire X ORDERBY FTIRANK(X) WHERE X has_text "affref"')) - - def test_anon_has_text(self): - self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before - self.sexecute('INSERT Affaire X: X ref "no readable card"')[0][0] - aff1 = self.sexecute('INSERT Affaire X: X ref "card"')[0][0] - # grant read access - self.sexecute('SET X owned_by U WHERE X eid %(x)s, U login "anon"', {'x': aff1}) - self.commit() - cnx = self.login('anon') - cu = cnx.cursor() - rset = cu.execute('Any X WHERE X has_text "card"') - # 5: 4 cards + 1 readable affaire - self.assertEqual(len(rset), 5, zip(rset.rows, rset.description)) - rset = cu.execute('Any X ORDERBY FTIRANK(X) WHERE X has_text "card"') - self.assertEqual(len(rset), 5, zip(rset.rows, rset.description)) - Connection_close(cnx.cnx) # cnx is a TestCaseConnectionProxy - - def
test_synchronization(self):
-        cu = self.cnx2.cursor()
-        assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1})
-        cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1})
-        aff2 = cu.execute('INSERT Affaire X: X ref "AFFREUX"')[0][0]
-        self.cnx2.commit()
-        try:
-            # force sync
-            self.repo.sources_by_uri['extern'].synchronize(MTIME)
-            self.assertTrue(self.sexecute('Any X WHERE X has_text "blah"'))
-            self.assertTrue(self.sexecute('Any X WHERE X has_text "affreux"'))
-            cu.execute('DELETE Affaire X WHERE X eid %(x)s', {'x': aff2})
-            self.cnx2.commit()
-            self.repo.sources_by_uri['extern'].synchronize(MTIME)
-            rset = self.sexecute('Any X WHERE X has_text "affreux"')
-            self.assertFalse(rset)
-        finally:
-            # restore state
-            cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1})
-            self.cnx2.commit()
-
-    def test_simplifiable_var(self):
-        affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0]
-        rset = self.sexecute('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB',
-                             {'x': affeid})
-        self.assertEqual(len(rset), 1)
-        self.assertEqual(rset[0][1], "pitetre")
-
-    def test_simplifiable_var_2(self):
-        affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0]
-        rset = self.sexecute('Any E WHERE E eid %(x)s, E in_state S, NOT S name "moved"',
-                             {'x': affeid, 'u': self.session.user.eid})
-        self.assertEqual(len(rset), 1)
-
-    def test_sort_func(self):
-        self.sexecute('Affaire X ORDERBY DUMB_SORT(RF) WHERE X ref RF')
-
-    def test_sort_func_ambigous(self):
-        self.sexecute('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF')
-
-    def test_in_eid(self):
-        iec1 = self.repo.extid2eid(self.repo.sources_by_uri['extern'], str(self.ec1),
-                                   'Card', self.session)
-        rset = self.sexecute('Any X WHERE X eid IN (%s, %s)' % (iec1, self.ic1))
-        self.assertEqual(sorted(r[0] for r in rset.rows), sorted([iec1, self.ic1]))
-
-    def test_greater_eid(self):
-        rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1))
-        self.assertEqual(len(rset.rows), 2) # self.ic1 and self.ic2
-        cu = self.cnx2.cursor()
-        ec2 = cu.execute('INSERT Card X: X title "glup"')[0][0]
-        self.cnx2.commit()
-        # 'X eid > something' should not trigger discovery
-        rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1))
-        self.assertEqual(len(rset.rows), 2)
-        # trigger discovery using another query
-        crset = self.sexecute('Card X WHERE X title "glup"')
-        self.assertEqual(len(crset.rows), 1)
-        rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1))
-        self.assertEqual(len(rset.rows), 3)
-        rset = self.sexecute('Any MAX(X)')
-        self.assertEqual(len(rset.rows), 1)
-        self.assertEqual(rset.rows[0][0], crset[0][0])
-
-    def test_attr_unification_1(self):
-        n1 = self.sexecute('INSERT Note X: X type "AFFREF"')[0][0]
-        n2 = self.sexecute('INSERT Note X: X type "AFFREU"')[0][0]
-        rset = self.sexecute('Any X,Y WHERE X is Note, Y is Affaire, X type T, Y ref T')
-        self.assertEqual(len(rset), 1, rset.rows)
-
-    def test_attr_unification_2(self):
-        cu = self.cnx2.cursor()
-        ec2 = cu.execute('INSERT Card X: X title "AFFREF"')[0][0]
-        self.cnx2.commit()
-        try:
-            c1 = self.sexecute('INSERT Card C: C title "AFFREF"')[0][0]
-            rset = self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X title T, Y ref T')
-            self.assertEqual(len(rset), 2, rset.rows)
-        finally:
-            cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2})
-            self.cnx2.commit()
-
-    def test_attr_unification_neq_1(self):
-        # XXX complete
-        self.sexecute('Any X,Y WHERE X is Note, Y is Affaire, X creation_date D, Y creation_date > D')
-
-    def test_attr_unification_neq_2(self):
-        # XXX complete
-        self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X creation_date D, Y creation_date > D')
-
-    def test_union(self):
-        afeids = self.sexecute('Affaire X')
-        ueids = self.sexecute('CWUser X')
-        rset = self.sexecute('(Any X WHERE X is Affaire) UNION (Any X WHERE X is CWUser)')
-        self.assertEqual(sorted(r[0] for r in rset.rows),
-                         sorted(r[0] for r in afeids + ueids))
-
-    def test_subquery1(self):
-        rsetbase = self.sexecute('Any W,X WITH W,X BEING (Any W,X ORDERBY W,X WHERE X wikiid W)')
-        self.assertEqual(len(rsetbase), 4)
-        self.assertEqual(sorted(rsetbase.rows), rsetbase.rows)
-        rset = self.sexecute('Any W,X LIMIT 2 OFFSET 2 WITH W,X BEING (Any W,X ORDERBY W,X WHERE X wikiid W)')
-        self.assertEqual(rset.rows, rsetbase.rows[2:4])
-        rset = self.sexecute('Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WITH W,X BEING (Any W,X WHERE X wikiid W)')
-        self.assertEqual(rset.rows, rsetbase.rows[2:4])
-        rset = self.sexecute('Any W,X WITH W,X BEING (Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WHERE X wikiid W)')
-        self.assertEqual(rset.rows, rsetbase.rows[2:4])
-
-    def test_subquery2(self):
-        affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0]
-        rset = self.sexecute('Any X,AA,AB WITH X,AA,AB BEING (Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB)',
-                             {'x': affeid})
-        self.assertEqual(len(rset), 1)
-        self.assertEqual(rset[0][1], "pitetre")
-
-    def test_not_relation(self):
-        states = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN'))
-        userstate = self.session.user.in_state[0]
-        states.remove((userstate.eid, userstate.name))
-        notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s',
-                                                        {'x': self.session.user.eid}))
-        self.assertSetEqual(notstates, states)
-        aff1 = self.sexecute('Any X WHERE X is Affaire, X ref "AFFREF"')[0][0]
-        aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1})[0]
-        self.assertEqual(aff1statename, 'pitetre')
-        states.add((userstate.eid, userstate.name))
-        states.remove((aff1stateeid, aff1statename))
-        notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s',
-                                                        {'x': aff1}))
-        self.assertSetEqual(notstates, states)
-
-    def test_absolute_url_base_url(self):
-        cu = self.cnx2.cursor()
-        ceid = cu.execute('INSERT Card X: X title "without wikiid to get eid based url"')[0][0]
-        self.cnx2.commit()
-        lc = self.sexecute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0)
-        self.assertEqual(lc.absolute_url(), 'http://extern.org/%s' % ceid)
-        cu.execute('DELETE Card X WHERE X eid %(x)s', {'x':ceid})
-        self.cnx2.commit()
-
-    def test_absolute_url_no_base_url(self):
-        cu = self.cnx3.cursor()
-        ceid = cu.execute('INSERT Card X: X title "without wikiid to get eid based url"')[0][0]
-        self.cnx3.commit()
-        lc = self.sexecute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0)
-        self.assertEqual(lc.absolute_url(), 'http://testing.fr/cubicweb/%s' % lc.eid)
-        cu.execute('DELETE Card X WHERE X eid %(x)s', {'x':ceid})
-        self.cnx3.commit()
-
-    def test_crossed_relation_noeid_needattr(self):
-        """http://www.cubicweb.org/ticket/1382452"""
-        aff1 = self.sexecute('INSERT Affaire X: X ref "AFFREF"')[0][0]
-        # link within extern source
-        ec1 = self.sexecute('Card X WHERE X wikiid "zzz"')[0][0]
-        self.sexecute('SET A documented_by C WHERE E eid %(a)s, C eid %(c)s',
-                      {'a': aff1, 'c': ec1})
-        # link from system to extern source
-        self.sexecute('SET A documented_by C WHERE E eid %(a)s, C eid %(c)s',
-                      {'a': aff1, 'c': self.ic2})
-        rset = self.sexecute('DISTINCT Any DEP WHERE P ref "AFFREF", P documented_by DEP, DEP wikiid LIKE "z%"')
-        self.assertEqual(sorted(rset.rows), [[ec1], [self.ic2]])
-
-    def test_nonregr1(self):
-        ueid = self.session.user.eid
-        affaire = self.sexecute('Affaire X WHERE X ref "AFFREF"').get_entity(0, 0)
-        self.sexecute('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s',
-                      {'x': affaire.eid, 'u': ueid})
-
-    def test_nonregr2(self):
-        iworkflowable = self.session.user.cw_adapt_to('IWorkflowable')
-        iworkflowable.fire_transition('deactivate')
-        treid = iworkflowable.latest_trinfo().eid
-        rset = self.sexecute('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D',
-                             {'x': treid})
-        self.assertEqual(len(rset), 1)
-        self.assertEqual(rset.rows[0], [self.session.user.eid])
-
-    def test_nonregr3(self):
-        self.sexecute('DELETE Card X WHERE X eid %(x)s, NOT X multisource_inlined_rel Y', {'x': self.ic1})
-
-    def test_nonregr4(self):
-        self.sexecute('Any X,S,U WHERE X in_state S, X todo_by U')
-
-    def test_delete_source(self):
-        req = self.request()
-        req.execute('DELETE CWSource S WHERE S name "extern"')
-        self.commit()
-        cu = self.session.system_sql("SELECT * FROM entities WHERE source='extern'")
-        self.assertFalse(cu.fetchall())
-
-if __name__ == '__main__':
-    from logilab.common.testlib import unittest_main
-    unittest_main()
diff -r 84738d495ffd -r 793377697c81 server/test/unittest_postgres.py
--- a/server/test/unittest_postgres.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/server/test/unittest_postgres.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -16,39 +16,53 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
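The unittest_postgres.py changes below drop the hostname-based SkipTest hack in
favour of declaring a configuration class on the test case. A minimal sketch of
the 3.19-style pattern, assuming only the names visible in this patch
(PostgresApptestConfiguration, configcls, admin_access)::

    from cubicweb.devtools import PostgresApptestConfiguration
    from cubicweb.devtools.testlib import CubicWebTC

    class MyPostgresTC(CubicWebTC):
        # declaring the configuration class replaces the old setUpClass
        # dance with ApptestConfiguration(..., sourcefile='sources_postgres')
        configcls = PostgresApptestConfiguration

        def test_something(self):
            with self.admin_access.repo_cnx() as cnx:
                self.assertTrue(cnx.execute('Any X WHERE X is CWGroup'))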
-import socket
 from datetime import datetime
+from threading import Thread

 from logilab.common.testlib import SkipTest

-from cubicweb.devtools import ApptestConfiguration
+from cubicweb.devtools import PostgresApptestConfiguration
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.predicates import is_instance
 from cubicweb.entities.adapters import IFTIndexableAdapter

-AT_LOGILAB = socket.gethostname().endswith('.logilab.fr') # XXX
-
 from unittest_querier import FixedOffset


 class PostgresFTITC(CubicWebTC):
-    @classmethod
-    def setUpClass(cls):
-        if not AT_LOGILAB: # XXX here until we can raise SkipTest in setUp to detect we can't connect to the db
-            raise SkipTest('XXX %s: require logilab configuration' % cls.__name__)
-        cls.config = ApptestConfiguration('data', sourcefile='sources_postgres',
-                                          apphome=cls.datadir)
+    configcls = PostgresApptestConfiguration
+
+    def test_eid_range(self):
+        # concurrent allocation of eid ranges
+        source = self.session.repo.sources_by_uri['system']
+        range1 = []
+        range2 = []
+        def allocate_eid_ranges(session, target):
+            for x in xrange(1, 10):
+                eid = source.create_eid(session, count=x)
+                target.extend(range(eid-x, eid))
+
+        t1 = Thread(target=lambda: allocate_eid_ranges(self.session, range1))
+        t2 = Thread(target=lambda: allocate_eid_ranges(self.session, range2))
+        t1.start()
+        t2.start()
+        t1.join()
+        t2.join()
+        self.assertEqual(range1, sorted(range1))
+        self.assertEqual(range2, sorted(range2))
+        self.assertEqual(set(), set(range1) & set(range2))

     def test_occurence_count(self):
-        req = self.request()
-        c1 = req.create_entity('Card', title=u'c1',
-                               content=u'cubicweb cubicweb cubicweb')
-        c2 = req.create_entity('Card', title=u'c3',
-                               content=u'cubicweb')
-        c3 = req.create_entity('Card', title=u'c2',
-                               content=u'cubicweb cubicweb')
-        self.commit()
-        self.assertEqual(req.execute('Card X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows,
-                         [(c1.eid,), (c3.eid,), (c2.eid,)])
+        with self.admin_access.repo_cnx() as cnx:
+            c1 = cnx.create_entity('Card', title=u'c1',
+                                   content=u'cubicweb cubicweb cubicweb')
+            c2 = cnx.create_entity('Card', title=u'c3',
+                                   content=u'cubicweb')
+            c3 = cnx.create_entity('Card', title=u'c2',
+                                   content=u'cubicweb cubicweb')
+            cnx.commit()
+            self.assertEqual(cnx.execute('Card X ORDERBY FTIRANK(X) DESC '
+                                         'WHERE X has_text "cubicweb"').rows,
                             [[c1.eid,], [c3.eid,], [c2.eid,]])
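The test_eid_range test above pins down the contract of create_eid(session,
count=n): concurrent callers must receive strictly increasing, pairwise
disjoint eid ranges. A toy model of that invariant, with a lock standing in
for the database sequence the real system source is assumed to rely on::

    from threading import Lock, Thread

    class EidAllocator(object):
        """Toy stand-in for the system source's eid generator (assumption:
        the real one delegates to a PostgreSQL sequence)."""
        def __init__(self):
            self._lock = Lock()
            self._last = 0

        def create_eid(self, count=1):
            # atomically reserve `count` consecutive ids, return the last one
            with self._lock:
                self._last += count
                return self._last

    alloc = EidAllocator()
    seen = [[], []]

    def worker(target):
        for x in range(1, 10):
            eid = alloc.create_eid(count=x)
            target.extend(range(eid - x, eid))

    threads = [Thread(target=worker, args=(s,)) for s in seen]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    # same invariant the test asserts: sorted ranges, empty intersection
    assert seen[0] == sorted(seen[0]) and seen[1] == sorted(seen[1])
    assert not set(seen[0]) & set(seen[1])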
     def test_attr_weight(self):
@@ -56,43 +70,48 @@
             __select__ = is_instance('Card')
             attr_weight = {'title': 'A'}
         with self.temporary_appobjects(CardIFTIndexableAdapter):
-            req = self.request()
-            c1 = req.create_entity('Card', title=u'c1',
-                                   content=u'cubicweb cubicweb cubicweb')
-            c2 = req.create_entity('Card', title=u'c2',
-                                   content=u'cubicweb cubicweb')
-            c3 = req.create_entity('Card', title=u'cubicweb',
-                                   content=u'autre chose')
-            self.commit()
-            self.assertEqual(req.execute('Card X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows,
-                             [(c3.eid,), (c1.eid,), (c2.eid,)])
+            with self.admin_access.repo_cnx() as cnx:
+                c1 = cnx.create_entity('Card', title=u'c1',
+                                       content=u'cubicweb cubicweb cubicweb')
+                c2 = cnx.create_entity('Card', title=u'c2',
+                                       content=u'cubicweb cubicweb')
+                c3 = cnx.create_entity('Card', title=u'cubicweb',
+                                       content=u'autre chose')
+                cnx.commit()
+                self.assertEqual(cnx.execute('Card X ORDERBY FTIRANK(X) DESC '
+                                             'WHERE X has_text "cubicweb"').rows,
+                                 [[c3.eid,], [c1.eid,], [c2.eid,]])

     def test_entity_weight(self):
         class PersonneIFTIndexableAdapter(IFTIndexableAdapter):
             __select__ = is_instance('Personne')
             entity_weight = 2.0
         with self.temporary_appobjects(PersonneIFTIndexableAdapter):
-            req = self.request()
-            c1 = req.create_entity('Personne', nom=u'c1', prenom=u'cubicweb')
-            c2 = req.create_entity('Comment', content=u'cubicweb cubicweb', comments=c1)
-            c3 = req.create_entity('Comment', content=u'cubicweb cubicweb cubicweb', comments=c1)
-            self.commit()
-            self.assertEqual(req.execute('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows,
-                             [(c1.eid,), (c3.eid,), (c2.eid,)])
+            with self.admin_access.repo_cnx() as cnx:
+                c1 = cnx.create_entity('Personne', nom=u'c1', prenom=u'cubicweb')
+                c2 = cnx.create_entity('Comment', content=u'cubicweb cubicweb',
+                                       comments=c1)
+                c3 = cnx.create_entity('Comment', content=u'cubicweb cubicweb cubicweb',
+                                       comments=c1)
+                cnx.commit()
+                self.assertEqual(cnx.execute('Any X ORDERBY FTIRANK(X) DESC '
+                                             'WHERE X has_text "cubicweb"').rows,
+                                 [[c1.eid,], [c3.eid,], [c2.eid,]])

     def test_tz_datetime(self):
-        self.execute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s",
-                     {'date': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))})
-        datenaiss = self.execute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0]
-        self.assertEqual(datenaiss.tzinfo, None)
-        self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 1, 0))
-        self.commit()
-        self.execute("INSERT Personne X: X nom 'boby', X tzdatenaiss %(date)s",
-                     {'date': datetime(1977, 6, 7, 2, 0)})
-        datenaiss = self.execute("Any XD WHERE X nom 'boby', X tzdatenaiss XD")[0][0]
-        self.assertEqual(datenaiss.tzinfo, None)
-        self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 2, 0))
+        with self.admin_access.repo_cnx() as cnx:
+            cnx.execute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s",
+                        {'date': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))})
+            datenaiss = cnx.execute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0]
+            self.assertEqual(datenaiss.tzinfo, None)
+            self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 1, 0))
+            cnx.commit()
+            cnx.execute("INSERT Personne X: X nom 'boby', X tzdatenaiss %(date)s",
+                        {'date': datetime(1977, 6, 7, 2, 0)})
+            datenaiss = cnx.execute("Any XD WHERE X nom 'boby', X tzdatenaiss XD")[0][0]
+            self.assertEqual(datenaiss.tzinfo, None)
+            self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 2, 0))

 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
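The pattern recurring throughout the hunks above is the 3.19 connection API:
test code no longer grabs an implicit request with self.request() and commits
through the test case, it opens an explicit connection whose lifetime is the
`with` block. A minimal before/after sketch using only the names shown in
this patch::

    # 3.18 style: implicit request, commit on the test case
    req = self.request()
    card = req.create_entity('Card', title=u'c1')
    self.commit()

    # 3.19 style: explicit, scoped connection
    with self.admin_access.repo_cnx() as cnx:
        card = cnx.create_entity('Card', title=u'c1')
        cnx.commit()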
diff -r 84738d495ffd -r 793377697c81 server/test/unittest_querier.py
--- a/server/test/unittest_querier.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/server/test/unittest_querier.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,5 +1,5 @@
 # -*- coding: iso-8859-1 -*-
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -27,7 +27,6 @@
 from cubicweb import QueryError, Unauthorized, Binary
 from cubicweb.server.sqlutils import SQL_PREFIX
 from cubicweb.server.utils import crypt_password
-from cubicweb.server.sources.native import make_schema
 from cubicweb.server.querier import manual_build_descr, _make_description
 from cubicweb.devtools import get_test_db_handler, TestServerConfiguration
 from cubicweb.devtools.testlib import CubicWebTC
@@ -60,17 +59,6 @@
 SQL_CONNECT_HOOKS['sqlite'].append(init_sqlite_connexion)

-from logilab.database import _GenericAdvFuncHelper
-TYPEMAP = _GenericAdvFuncHelper.TYPE_MAPPING
-
-class MakeSchemaTC(TestCase):
-    def test_known_values(self):
-        solution = {'A': 'String', 'B': 'CWUser'}
-        self.assertEqual(make_schema((Variable('A'), Variable('B')), solution,
-                                     'table0', TYPEMAP),
-                         ('C0 text,C1 integer', {'A': 'table0.C0', 'B': 'table0.C1'}))
-
-
 def setUpClass(cls, *args):
     global repo, cnx
     config = TestServerConfiguration(apphome=UtilsTC.datadir)
@@ -122,168 +110,194 @@
         pass

     def test_preprocess_1(self):
-        reid = self.execute('Any X WHERE X is CWRType, X name "owned_by"')[0][0]
-        rqlst = self._prepare('Any COUNT(RDEF) WHERE RDEF relation_type X, X eid %(x)s', {'x': reid})
-        self.assertEqual(rqlst.solutions, [{'RDEF': 'CWAttribute'}, {'RDEF': 'CWRelation'}])
+        with self.session.new_cnx() as cnx:
+            reid = cnx.execute('Any X WHERE X is CWRType, X name "owned_by"')[0][0]
+            rqlst = self._prepare(cnx, 'Any COUNT(RDEF) WHERE RDEF relation_type X, X eid %(x)s',
+                                  {'x': reid})
+            self.assertEqual([{'RDEF': 'CWAttribute'}, {'RDEF': 'CWRelation'}],
+                             rqlst.solutions)

     def test_preprocess_2(self):
-        teid = self.execute("INSERT Tag X: X name 'tag'")[0][0]
-        #geid = self.execute("CWGroup G WHERE G name 'users'")[0][0]
-        #self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s",
-        #             {'g': geid, 't': teid}, 'g')
-        rqlst = self._prepare('Any X WHERE E eid %(x)s, E tags X', {'x': teid})
-        # the query may be optimized, should keep only one solution
-        # (any one, etype will be discarded)
-        self.assertEqual(len(rqlst.solutions), 1)
+        with self.session.new_cnx() as cnx:
+            teid = cnx.execute("INSERT Tag X: X name 'tag'")[0][0]
+            #geid = self.execute("CWGroup G WHERE G name 'users'")[0][0]
+            #self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s",
+            #             {'g': geid, 't': teid}, 'g')
+            rqlst = self._prepare(cnx, 'Any X WHERE E eid %(x)s, E tags X', {'x': teid})
+            # the query may be optimized, should keep only one solution
+            # (any one, etype will be discarded)
+            self.assertEqual(1, len(rqlst.solutions))
+
+    def assertRQLEqual(self, expected, got):
+        from rql import parse
+        self.assertMultiLineEqual(unicode(parse(expected)),
+                                  unicode(parse(got)))
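The new assertRQLEqual helper above compares queries after a parse/print
round-trip, so line wrapping and spacing in the expected string no longer
matter. A sketch of why that works, using the same rql.parse the helper
imports (and assuming, as the helper does, that printing a parsed tree
normalizes whitespace)::

    from rql import parse

    q1 = 'Any X WHERE X is CWGroup, X name N'
    q2 = 'Any X   WHERE X is CWGroup,   X name N'
    # both print back in canonical form, however the source was spaced
    assert unicode(parse(q1)) == unicode(parse(q2))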
     def test_preprocess_security(self):
-        plan = self._prepare_plan('Any ETN,COUNT(X) GROUPBY ETN '
-                                  'WHERE X is ET, ET name ETN')
-        plan.session = self.user_groups_session('users')
-        union = plan.rqlst
-        plan.preprocess(union)
-        self.assertEqual(len(union.children), 1)
-        self.assertEqual(len(union.children[0].with_), 1)
-        subq = union.children[0].with_[0].query
-        self.assertEqual(len(subq.children), 4)
-        self.assertEqual([t.as_string() for t in union.children[0].selection],
-                         ['ETN','COUNT(X)'])
-        self.assertEqual([t.as_string() for t in union.children[0].groupby],
-                         ['ETN'])
-        partrqls = sorted(((rqlst.as_string(), rqlst.solutions) for rqlst in subq.children))
-        rql, solutions = partrqls[0]
-        self.assertEqual(rql,
-                         'Any ETN,X WHERE X is ET, ET name ETN, (EXISTS(X owned_by %(B)s))'
-                         ' OR ((((EXISTS(D concerne C?, C owned_by %(B)s, X identity D, C is Division, D is Affaire))'
-                         ' OR (EXISTS(H concerne G?, G owned_by %(B)s, G is SubDivision, X identity H, H is Affaire)))'
-                         ' OR (EXISTS(I concerne F?, F owned_by %(B)s, F is Societe, X identity I, I is Affaire)))'
-                         ' OR (EXISTS(J concerne E?, E owned_by %(B)s, E is Note, X identity J, J is Affaire)))'
-                         ', ET is CWEType, X is Affaire')
-        self.assertEqual(solutions, [{'C': 'Division',
-                                      'D': 'Affaire',
-                                      'E': 'Note',
-                                      'F': 'Societe',
-                                      'G': 'SubDivision',
-                                      'H': 'Affaire',
-                                      'I': 'Affaire',
-                                      'J': 'Affaire',
-                                      'X': 'Affaire',
-                                      'ET': 'CWEType', 'ETN': 'String'}])
-        rql, solutions = partrqls[1]
-        self.assertEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, CWUser, Card, Comment, Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, Frozable, Note, Old, Personne, RQLExpression, Societe, State, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)')
-        self.assertListEqual(sorted(solutions),
-                             sorted([{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Card', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Comment', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Division', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWCache', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWConstraint', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWConstraintType', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWEType', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWAttribute', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWGroup', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWRelation', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWRType', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWSource', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWUniqueTogetherConstraint', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Email', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'EmailPart', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'EmailThread', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'ExternalUri', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'File', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Folder', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Frozable', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Note', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Old', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Personne', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Societe', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'State', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'SubDivision', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'SubWorkflowExitPoint', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Tag', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Transition', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'TrInfo', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'Workflow', 'ETN': 'String', 'ET': 'CWEType'},
-                                     {'X': 'WorkflowTransition', 'ETN': 'String', 'ET': 'CWEType'}]))
-        rql, solutions = partrqls[2]
-        self.assertEqual(rql,
-                         'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(%(D)s use_email X), '
-                         'ET is CWEType, X is EmailAddress')
-        self.assertEqual(solutions, [{'X': 'EmailAddress', 'ET': 'CWEType', 'ETN': 'String'}])
-        rql, solutions = partrqls[3]
-        self.assertEqual(rql,
-                         'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(X owned_by %(C)s), '
-                         'ET is CWEType, X is Basket')
-        self.assertEqual(solutions, [{'X': 'Basket', 'ET': 'CWEType', 'ETN': 'String'}])
+        s = self.user_groups_session('users')
+        with s.new_cnx() as cnx:
+            plan = self._prepare_plan(cnx, 'Any ETN,COUNT(X) GROUPBY ETN '
+                                      'WHERE X is ET, ET name ETN')
+            union = plan.rqlst
+            plan.preprocess(union)
+            self.assertEqual(len(union.children), 1)
+            self.assertEqual(len(union.children[0].with_), 1)
+            subq = union.children[0].with_[0].query
+            self.assertEqual(len(subq.children), 4)
+            self.assertEqual([t.as_string() for t in union.children[0].selection],
+                             ['ETN','COUNT(X)'])
+            self.assertEqual([t.as_string() for t in union.children[0].groupby],
+                             ['ETN'])
+            partrqls = sorted(((rqlst.as_string(), rqlst.solutions) for rqlst in subq.children))
+            rql, solutions = partrqls[0]
+            self.assertRQLEqual(rql,
+                                'Any ETN,X WHERE X is ET, ET name ETN, (EXISTS(X owned_by %(B)s))'
+                                ' OR ((((EXISTS(D concerne C?, C owned_by %(B)s, '
+                                '       X identity D, C is Division, D is Affaire))'
+                                ' OR (EXISTS(H concerne G?, G owned_by %(B)s, G is SubDivision, '
+                                '     X identity H, H is Affaire)))'
+                                ' OR (EXISTS(I concerne F?, F owned_by %(B)s, F is Societe, '
+                                '     X identity I, I is Affaire)))'
+                                ' OR (EXISTS(J concerne E?, E owned_by %(B)s, E is Note, '
+                                '     X identity J, J is Affaire)))'
+                                ', ET is CWEType, X is Affaire')
+            self.assertEqual(solutions, [{'C': 'Division',
+                                          'D': 'Affaire',
+                                          'E': 'Note',
+                                          'F': 'Societe',
+                                          'G': 'SubDivision',
+                                          'H': 'Affaire',
+                                          'I': 'Affaire',
+                                          'J': 'Affaire',
+                                          'X': 'Affaire',
+                                          'ET': 'CWEType', 'ETN': 'String'}])
+            rql, solutions = partrqls[1]
+            self.assertRQLEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, '
+                                'X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, '
+                                '        CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, '
+                                '        CWRelation, CWSource, CWUniqueTogetherConstraint, CWUser, Card, Comment, '
+                                '        Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, Frozable, '
+                                '        Note, Old, Personne, RQLExpression, Societe, State, SubDivision, '
+                                '        SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)')
+            self.assertListEqual(sorted(solutions),
+                                 sorted([{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Card', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Comment', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Division', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWCache', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWConstraint', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWConstraintType', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWEType', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWAttribute', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWGroup', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWRelation', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWRType', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWSource', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWUniqueTogetherConstraint', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Email', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'EmailPart', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'EmailThread', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'ExternalUri', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'File', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Folder', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Frozable', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Note', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Old', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Personne', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Societe', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'State', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'SubDivision', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'SubWorkflowExitPoint', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Tag', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Transition', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'TrInfo', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'Workflow', 'ETN': 'String', 'ET': 'CWEType'},
+                                         {'X': 'WorkflowTransition', 'ETN': 'String', 'ET': 'CWEType'}]))
+            rql, solutions = partrqls[2]
+            self.assertEqual(rql,
+                             'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(%(D)s use_email X), '
+                             'ET is CWEType, X is EmailAddress')
+            self.assertEqual(solutions, [{'X': 'EmailAddress', 'ET': 'CWEType', 'ETN': 'String'}])
+            rql, solutions = partrqls[3]
+            self.assertEqual(rql,
+                             'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(X owned_by %(C)s), '
+                             'ET is CWEType, X is Basket')
+            self.assertEqual(solutions, [{'X': 'Basket', 'ET': 'CWEType', 'ETN': 'String'}])

     def test_preprocess_security_aggregat(self):
-        plan = self._prepare_plan('Any MAX(X)')
-        plan.session = self.user_groups_session('users')
-        union = plan.rqlst
-        plan.preprocess(union)
-        self.assertEqual(len(union.children), 1)
-        self.assertEqual(len(union.children[0].with_), 1)
-        subq = union.children[0].with_[0].query
-        self.assertEqual(len(subq.children), 4)
-        self.assertEqual([t.as_string() for t in union.children[0].selection],
-                         ['MAX(X)'])
+        s = self.user_groups_session('users')
+        with s.new_cnx() as cnx:
+            plan = self._prepare_plan(cnx, 'Any MAX(X)')
+            union = plan.rqlst
+            plan.preprocess(union)
+            self.assertEqual(len(union.children), 1)
+            self.assertEqual(len(union.children[0].with_), 1)
+            subq = union.children[0].with_[0].query
+            self.assertEqual(len(subq.children), 4)
+            self.assertEqual([t.as_string() for t in union.children[0].selection],
+                             ['MAX(X)'])

     def test_preprocess_nonregr(self):
-        rqlst = self._prepare('Any S ORDERBY SI WHERE NOT S ecrit_par O, S para SI')
-        self.assertEqual(len(rqlst.solutions), 1)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any S ORDERBY SI WHERE NOT S ecrit_par O, S para SI')
+            self.assertEqual(len(rqlst.solutions), 1)
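test_preprocess_security above pins down what security preprocessing does for
a session in the users group: the single aggregate query is split into a
WITH ... BEING (... UNION ...) over four permission-homogeneous subqueries,
each carrying the EXISTS() conditions taken from the read permissions. A
schematic summary of the shape the test asserts (comments only, not meant to
be executed)::

    # original query
    #   Any ETN,COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN
    # is rewritten to, schematically:
    #   Any ETN,COUNT(X) GROUPBY ETN WITH ETN,X BEING (
    #       (Any ETN,X WHERE ..., EXISTS(X owned_by %(B)s) OR ..., X is Affaire)
    #     UNION
    #       (Any ETN,X WHERE ..., X is IN(BaseTransition, Bookmark, ...))
    #     UNION
    #       (Any ETN,X WHERE ..., EXISTS(%(D)s use_email X), X is EmailAddress)
    #     UNION
    #       (Any ETN,X WHERE ..., EXISTS(X owned_by %(C)s), X is Basket))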
     def test_build_description(self):
         # should return an empty result set
-        rset = self.execute('Any X WHERE X eid %(x)s', {'x': self.session.user.eid})
+        rset = self.qexecute('Any X WHERE X eid %(x)s', {'x': self.session.user.eid})
         self.assertEqual(rset.description[0][0], 'CWUser')
-        rset = self.execute('Any 1')
+        rset = self.qexecute('Any 1')
         self.assertEqual(rset.description[0][0], 'Int')
-        rset = self.execute('Any TRUE')
+        rset = self.qexecute('Any TRUE')
         self.assertEqual(rset.description[0][0], 'Boolean')
-        rset = self.execute('Any "hop"')
+        rset = self.qexecute('Any "hop"')
         self.assertEqual(rset.description[0][0], 'String')
-        rset = self.execute('Any TODAY')
+        rset = self.qexecute('Any TODAY')
         self.assertEqual(rset.description[0][0], 'Date')
-        rset = self.execute('Any NOW')
+        rset = self.qexecute('Any NOW')
         self.assertEqual(rset.description[0][0], 'Datetime')
-        rset = self.execute('Any %(x)s', {'x': 1})
+        rset = self.qexecute('Any %(x)s', {'x': 1})
         self.assertEqual(rset.description[0][0], 'Int')
-        rset = self.execute('Any %(x)s', {'x': 1L})
+        rset = self.qexecute('Any %(x)s', {'x': 1L})
         self.assertEqual(rset.description[0][0], 'Int')
-        rset = self.execute('Any %(x)s', {'x': True})
+        rset = self.qexecute('Any %(x)s', {'x': True})
         self.assertEqual(rset.description[0][0], 'Boolean')
-        rset = self.execute('Any %(x)s', {'x': 1.0})
+        rset = self.qexecute('Any %(x)s', {'x': 1.0})
         self.assertEqual(rset.description[0][0], 'Float')
-        rset = self.execute('Any %(x)s', {'x': datetime.now()})
+        rset = self.qexecute('Any %(x)s', {'x': datetime.now()})
         self.assertEqual(rset.description[0][0], 'Datetime')
-        rset = self.execute('Any %(x)s', {'x': 'str'})
+        rset = self.qexecute('Any %(x)s', {'x': 'str'})
         self.assertEqual(rset.description[0][0], 'String')
-        rset = self.execute('Any %(x)s', {'x': u'str'})
+        rset = self.qexecute('Any %(x)s', {'x': u'str'})
         self.assertEqual(rset.description[0][0], 'String')

     def test_build_descr1(self):
-        rset = self.execute('(Any U,L WHERE U login L) UNION (Any G,N WHERE G name N, G is CWGroup)')
-        rset.req = self.session
-        orig_length = len(rset)
-        rset.rows[0][0] = 9999999
-        description = manual_build_descr(rset.req, rset.syntax_tree(), None, rset.rows)
-        self.assertEqual(len(description), orig_length - 1)
-        self.assertEqual(len(rset.rows), orig_length - 1)
-        self.assertNotEqual(rset.rows[0][0], 9999999)
+        with self.session.new_cnx() as cnx:
+            rset = cnx.execute('(Any U,L WHERE U login L) UNION '
+                               '(Any G,N WHERE G name N, G is CWGroup)')
+            # rset.req = self.session
+            orig_length = len(rset)
+            rset.rows[0][0] = 9999999
+            description = manual_build_descr(cnx, rset.syntax_tree(), None, rset.rows)
+            self.assertEqual(len(description), orig_length - 1)
+            self.assertEqual(len(rset.rows), orig_length - 1)
+            self.assertNotEqual(rset.rows[0][0], 9999999)

     def test_build_descr2(self):
-        rset = self.execute('Any X,Y WITH X,Y BEING ((Any G,NULL WHERE G is CWGroup) UNION (Any U,G WHERE U in_group G))')
+        rset = self.qexecute('Any X,Y WITH X,Y BEING ((Any G,NULL WHERE G is CWGroup) UNION '
+                             '(Any U,G WHERE U in_group G))')
         for x, y in rset.description:
             if y is not None:
                 self.assertEqual(y, 'CWGroup')

     def test_build_descr3(self):
-        rset = self.execute('(Any G,NULL WHERE G is CWGroup) UNION (Any U,G WHERE U in_group G)')
+        rset = self.qexecute('(Any G,NULL WHERE G is CWGroup) UNION '
+                             '(Any U,G WHERE U in_group G)')
         for x, y in rset.description:
             if y is not None:
                 self.assertEqual(y, 'CWGroup')
@@ -294,284 +308,298 @@
     tearDownClass = classmethod(tearDownClass)

     def test_encoding_pb(self):
-        self.assertRaises(RQLSyntaxError, self.execute,
+        self.assertRaises(RQLSyntaxError, self.qexecute,
                           'Any X WHERE X is CWRType, X name "öwned_by"')

     def test_unknown_eid(self):
         # should return an empty result set
-        self.assertFalse(self.execute('Any X WHERE X eid 99999999'))
+        self.assertFalse(self.qexecute('Any X WHERE X eid 99999999'))

     def test_typed_eid(self):
         # should return an empty result set
-        rset = self.execute('Any X WHERE X eid %(x)s', {'x': '1'})
+        rset = self.qexecute('Any X WHERE X eid %(x)s', {'x': '1'})
         self.assertIsInstance(rset[0][0], (int, long))

     def test_bytes_storage(self):
-        feid = self.execute('INSERT File X: X data_name "foo.pdf", X data_format "text/plain", X data %(data)s',
+        feid = self.qexecute('INSERT File X: X data_name "foo.pdf", '
+                             'X data_format "text/plain", X data %(data)s',
                             {'data': Binary("xxx")})[0][0]
-        fdata = self.execute('Any D WHERE X data D, X eid %(x)s', {'x': feid})[0][0]
+        fdata = self.qexecute('Any D WHERE X data D, X eid %(x)s', {'x': feid})[0][0]
         self.assertIsInstance(fdata, Binary)
         self.assertEqual(fdata.getvalue(), 'xxx')

     # selection queries tests #################################################

     def test_select_1(self):
-        rset = self.execute('Any X ORDERBY X WHERE X is CWGroup')
+        rset = self.qexecute('Any X ORDERBY X WHERE X is CWGroup')
         result, descr = rset.rows, rset.description
         self.assertEqual(tuplify(result), [(2,), (3,), (4,), (5,)])
         self.assertEqual(descr, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)])

     def test_select_2(self):
-        rset = self.execute('Any X ORDERBY N WHERE X is CWGroup, X name N')
+        rset = self.qexecute('Any X ORDERBY N WHERE X is CWGroup, X name N')
         self.assertEqual(tuplify(rset.rows), [(2,), (3,), (4,), (5,)])
         self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)])
-        rset = self.execute('Any X ORDERBY N DESC WHERE X is CWGroup, X name N')
+        rset = self.qexecute('Any X ORDERBY N DESC WHERE X is CWGroup, X name N')
         self.assertEqual(tuplify(rset.rows), [(5,), (4,), (3,), (2,)])

     def test_select_3(self):
-        rset = self.execute('Any N GROUPBY N WHERE X is CWGroup, X name N')
+        rset = self.qexecute('Any N GROUPBY N WHERE X is CWGroup, X name N')
         result, descr = rset.rows, rset.description
         result.sort()
         self.assertEqual(tuplify(result), [('guests',), ('managers',), ('owners',), ('users',)])
         self.assertEqual(descr, [('String',), ('String',), ('String',), ('String',)])

     def test_select_is(self):
-        rset = self.execute('Any X, TN ORDERBY TN LIMIT 10 WHERE X is T, T name TN')
+        rset = self.qexecute('Any X, TN ORDERBY TN LIMIT 10 WHERE X is T, T name TN')
         result, descr = rset.rows, rset.description
         self.assertEqual(result[0][1], descr[0][0])

     def test_select_is_aggr(self):
-        rset = self.execute('Any TN, COUNT(X) GROUPBY TN ORDERBY 2 DESC WHERE X is T, T name TN')
+        rset = self.qexecute('Any TN, COUNT(X) GROUPBY TN ORDERBY 2 DESC WHERE X is T, T name TN')
         result, descr = rset.rows, rset.description
         self.assertEqual(descr[0][0], 'String')
         self.assertEqual(descr[0][1], 'Int')
         self.assertEqual(result[0][0], 'CWRelation') # XXX may change as schema evolve

     def test_select_groupby_orderby(self):
-        rset = self.execute('Any N GROUPBY N ORDERBY N WHERE X is CWGroup, X name N')
+        rset = self.qexecute('Any N GROUPBY N ORDERBY N WHERE X is CWGroup, X name N')
         self.assertEqual(tuplify(rset.rows), [('guests',), ('managers',), ('owners',), ('users',)])
         self.assertEqual(rset.description, [('String',), ('String',), ('String',), ('String',)])

     def test_select_complex_groupby(self):
-        rset = self.execute('Any N GROUPBY N WHERE X name N')
-        rset = self.execute('Any N,MAX(D) GROUPBY N LIMIT 5 WHERE X name N, X creation_date D')
+        rset = self.qexecute('Any N GROUPBY N WHERE X name N')
+        rset = self.qexecute('Any N,MAX(D) GROUPBY N LIMIT 5 WHERE X name N, X creation_date D')

     def test_select_inlined_groupby(self):
-        seid = self.execute('State X WHERE X name "deactivated"')[0][0]
-        rset = self.execute('Any U,L,S GROUPBY U,L,S WHERE X in_state S, U login L, S eid %s' % seid)
+        seid = self.qexecute('State X WHERE X name "deactivated"')[0][0]
+        rset = self.qexecute('Any U,L,S GROUPBY U,L,S WHERE X in_state S, U login L, S eid %s' % seid)

     def test_select_groupby_funccall(self):
-        rset = self.execute('Any YEAR(CD), COUNT(X) GROUPBY YEAR(CD) WHERE X is CWUser, X creation_date CD')
+        rset = self.qexecute('Any YEAR(CD), COUNT(X) GROUPBY YEAR(CD) '
+                             'WHERE X is CWUser, X creation_date CD')
         self.assertListEqual(rset.rows, [[date.today().year, 2]])

     def test_select_groupby_colnumber(self):
-        rset = self.execute('Any YEAR(CD), COUNT(X) GROUPBY 1 WHERE X is CWUser, X creation_date CD')
+        rset = self.qexecute('Any YEAR(CD), COUNT(X) GROUPBY 1 '
+                             'WHERE X is CWUser, X creation_date CD')
         self.assertListEqual(rset.rows, [[date.today().year, 2]])

     def test_select_complex_orderby(self):
-        rset1 = self.execute('Any N ORDERBY N WHERE X name N')
+        rset1 = self.qexecute('Any N ORDERBY N WHERE X name N')
         self.assertEqual(sorted(rset1.rows), rset1.rows)
-        rset = self.execute('Any N ORDERBY N LIMIT 5 OFFSET 1 WHERE X name N')
+        rset = self.qexecute('Any N ORDERBY N LIMIT 5 OFFSET 1 WHERE X name N')
         self.assertEqual(rset.rows[0][0], rset1.rows[1][0])
         self.assertEqual(len(rset), 5)

     def test_select_5(self):
-        rset = self.execute('Any X, TMP ORDERBY TMP WHERE X name TMP, X is CWGroup')
-        self.assertEqual(tuplify(rset.rows), [(2, 'guests',), (3, 'managers',), (4, 'owners',), (5, 'users',)])
-        self.assertEqual(rset.description, [('CWGroup', 'String',), ('CWGroup', 'String',), ('CWGroup', 'String',), ('CWGroup', 'String',)])
+        rset = self.qexecute('Any X, TMP ORDERBY TMP WHERE X name TMP, X is CWGroup')
+        self.assertEqual(tuplify(rset.rows),
+                         [(2, 'guests',),
+                          (3, 'managers',),
+                          (4, 'owners',),
+                          (5, 'users',)])
+        self.assertEqual(rset.description,
+                         [('CWGroup', 'String',),
+                          ('CWGroup', 'String',),
+                          ('CWGroup', 'String',),
+                          ('CWGroup', 'String',)])

     def test_select_6(self):
-        self.execute("INSERT Personne X: X nom 'bidule'")[0]
-        rset = self.execute('Any Y where X name TMP, Y nom in (TMP, "bidule")')
+        self.qexecute("INSERT Personne X: X nom 'bidule'")[0]
+        rset = self.qexecute('Any Y where X name TMP, Y nom in (TMP, "bidule")')
         #self.assertEqual(rset.description, [('Personne',), ('Personne',)])
         self.assertIn(('Personne',), rset.description)
-        rset = self.execute('DISTINCT Any Y where X name TMP, Y nom in (TMP, "bidule")')
+        rset = self.qexecute('DISTINCT Any Y where X name TMP, Y nom in (TMP, "bidule")')
         self.assertIn(('Personne',), rset.description)

     def test_select_not_attr(self):
-        peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
-        seid = self.execute("INSERT Societe X: X nom 'chouette'")[0][0]
-        rset = self.execute('Personne X WHERE NOT X nom "bidule"')
+        peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0]
+        seid = self.qexecute("INSERT Societe X: X nom 'chouette'")[0][0]
+        rset = self.qexecute('Personne X WHERE NOT X nom "bidule"')
         self.assertEqual(len(rset.rows), 0, rset.rows)
-        rset = self.execute('Personne X WHERE NOT X nom "bid"')
+        rset = self.qexecute('Personne X WHERE NOT X nom "bid"')
         self.assertEqual(len(rset.rows), 1, rset.rows)
-        self.execute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'")
-        rset = self.execute('Personne X WHERE NOT X travaille S')
+        self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'")
+        rset = self.qexecute('Personne X WHERE NOT X travaille S')
         self.assertEqual(len(rset.rows), 0, rset.rows)

     def test_select_is_in(self):
-        self.execute("INSERT Personne X: X nom 'bidule'")
-        self.execute("INSERT Societe X: X nom 'chouette'")
-        self.assertEqual(len(self.execute("Any X WHERE X is IN (Personne, Societe)")),
+        self.qexecute("INSERT Personne X: X nom 'bidule'")
+        self.qexecute("INSERT Societe X: X nom 'chouette'")
+        self.assertEqual(len(self.qexecute("Any X WHERE X is IN (Personne, Societe)")),
                          2)

     def test_select_not_rel(self):
-        self.execute("INSERT Personne X: X nom 'bidule'")
-        self.execute("INSERT Societe X: X nom 'chouette'")
-        self.execute("INSERT Personne X: X nom 'autre'")
-        self.execute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'")
-        rset = self.execute('Personne X WHERE NOT X travaille S')
+        self.qexecute("INSERT Personne X: X nom 'bidule'")
+        self.qexecute("INSERT Societe X: X nom 'chouette'")
+        self.qexecute("INSERT Personne X: X nom 'autre'")
+        self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'")
+        rset = self.qexecute('Personne X WHERE NOT X travaille S')
         self.assertEqual(len(rset.rows), 1, rset.rows)
-        rset = self.execute('Personne X WHERE NOT X travaille S, S nom "chouette"')
+        rset = self.qexecute('Personne X WHERE NOT X travaille S, S nom "chouette"')
         self.assertEqual(len(rset.rows), 1, rset.rows)

     def test_select_nonregr_inlined(self):
-        self.execute("INSERT Note X: X para 'bidule'")
-        self.execute("INSERT Personne X: X nom 'chouette'")
-        self.execute("INSERT Personne X: X nom 'autre'")
-        self.execute("SET X ecrit_par P WHERE X para 'bidule', P nom 'chouette'")
-        rset = self.execute('Any U,T ORDERBY T DESC WHERE U is CWUser, '
-                            'N ecrit_par U, N type T')#, {'x': self.ueid})
+        self.qexecute("INSERT Note X: X para 'bidule'")
+        self.qexecute("INSERT Personne X: X nom 'chouette'")
+        self.qexecute("INSERT Personne X: X nom 'autre'")
+        self.qexecute("SET X ecrit_par P WHERE X para 'bidule', P nom 'chouette'")
+        rset = self.qexecute('Any U,T ORDERBY T DESC WHERE U is CWUser, '
+                             'N ecrit_par U, N type T')#, {'x': self.ueid})
         self.assertEqual(len(rset.rows), 0)

     def test_select_nonregr_edition_not(self):
         groupeids = set((2, 3, 4))
-        groupreadperms = set(r[0] for r in self.execute('Any Y WHERE X name "CWGroup", Y eid IN(2, 3, 4), X read_permission Y'))
-        rset = self.execute('DISTINCT Any Y WHERE X is CWEType, X name "CWGroup", Y eid IN(2, 3, 4), NOT X read_permission Y')
+        groupreadperms = set(r[0] for r in self.qexecute('Any Y WHERE X name "CWGroup", '
+                                                         'Y eid IN(2, 3, 4), X read_permission Y'))
+        rset = self.qexecute('DISTINCT Any Y WHERE X is CWEType, X name "CWGroup", '
+                             'Y eid IN(2, 3, 4), NOT X read_permission Y')
         self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms))
-        rset = self.execute('DISTINCT Any Y WHERE X name "CWGroup", Y eid IN(2, 3, 4), NOT X read_permission Y')
+        rset = self.qexecute('DISTINCT Any Y WHERE X name "CWGroup", '
+                             'Y eid IN(2, 3, 4), NOT X read_permission Y')
         self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms))

     def test_select_outer_join(self):
-        peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
-        peid2 = self.execute("INSERT Personne X: X nom 'autre'")[0][0]
-        seid1 = self.execute("INSERT Societe X: X nom 'chouette'")[0][0]
-        seid2 = self.execute("INSERT Societe X: X nom 'chouetos'")[0][0]
-        rset = self.execute('Any X,S ORDERBY X WHERE X travaille S?')
+        peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0]
+        peid2 = self.qexecute("INSERT Personne X: X nom 'autre'")[0][0]
+        seid1 = self.qexecute("INSERT Societe X: X nom 'chouette'")[0][0]
+        seid2 = self.qexecute("INSERT Societe X: X nom 'chouetos'")[0][0]
+        rset = self.qexecute('Any X,S ORDERBY X WHERE X travaille S?')
         self.assertEqual(rset.rows, [[peid1, None], [peid2, None]])
-        self.execute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'")
-        rset = self.execute('Any X,S ORDERBY X WHERE X travaille S?')
+        self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'")
+        rset = self.qexecute('Any X,S ORDERBY X WHERE X travaille S?')
         self.assertEqual(rset.rows, [[peid1, seid1], [peid2, None]])
-        rset = self.execute('Any S,X ORDERBY S WHERE X? travaille S')
+        rset = self.qexecute('Any S,X ORDERBY S WHERE X? travaille S')
         self.assertEqual(rset.rows, [[seid1, peid1], [seid2, None]])

     def test_select_outer_join_optimized(self):
-        peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
-        rset = self.execute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1})
+        peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0]
+        rset = self.qexecute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1})
         self.assertEqual(rset.rows, [[peid1]])
-        rset = self.execute('Any X WHERE X eid %(x)s, X require_permission P?',
+        rset = self.qexecute('Any X WHERE X eid %(x)s, X require_permission P?',
                             {'x':peid1})
         self.assertEqual(rset.rows, [[peid1]])

     def test_select_left_outer_join(self):
-        rset = self.execute('DISTINCT Any G WHERE U? in_group G')
+        rset = self.qexecute('DISTINCT Any G WHERE U? in_group G')
         self.assertEqual(len(rset), 4)
-        rset = self.execute('DISTINCT Any G WHERE U? in_group G, U eid %(x)s',
+        rset = self.qexecute('DISTINCT Any G WHERE U? in_group G, U eid %(x)s',
                             {'x': self.session.user.eid})
         self.assertEqual(len(rset), 4)
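The outer-join tests above exercise the `?` marker, which makes a relation
optional: rows lacking the relation are still returned, padded with None. A
hypothetical companion test, written in the style of this suite and using
only constructs shown in this patch::

    def test_optional_relation_sketch(self):
        # hypothetical test illustrating the `?` (left outer join) marker
        with self.admin_access.repo_cnx() as cnx:
            bob = cnx.execute("INSERT Personne X: X nom 'bob'")[0][0]
            # no travaille relation yet: bob still comes back, paired with None
            rows = cnx.execute('Any X,S ORDERBY X WHERE X travaille S?').rows
            self.assertIn([bob, None], rows)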
     def test_select_ambigous_outer_join(self):
-        teid = self.execute("INSERT Tag X: X name 'tag'")[0][0]
-        self.execute("INSERT Tag X: X name 'tagbis'")[0][0]
-        geid = self.execute("CWGroup G WHERE G name 'users'")[0][0]
-        self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s",
+        teid = self.qexecute("INSERT Tag X: X name 'tag'")[0][0]
+        self.qexecute("INSERT Tag X: X name 'tagbis'")[0][0]
+        geid = self.qexecute("CWGroup G WHERE G name 'users'")[0][0]
+        self.qexecute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s",
                       {'g': geid, 't': teid})
-        rset = self.execute("Any GN,TN ORDERBY GN WHERE T? tags G, T name TN, G name GN")
+        rset = self.qexecute("Any GN,TN ORDERBY GN WHERE T? tags G, T name TN, G name GN")
         self.assertIn(['users', 'tag'], rset.rows)
         self.assertIn(['activated', None], rset.rows)
-        rset = self.execute("Any GN,TN ORDERBY GN WHERE T tags G?, T name TN, G name GN")
+        rset = self.qexecute("Any GN,TN ORDERBY GN WHERE T tags G?, T name TN, G name GN")
         self.assertEqual(rset.rows, [[None, 'tagbis'], ['users', 'tag']])

     def test_select_not_inline_rel(self):
-        self.execute("INSERT Personne X: X nom 'bidule'")
-        self.execute("INSERT Note X: X type 'a'")
-        self.execute("INSERT Note X: X type 'b'")
-        self.execute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'")
-        rset = self.execute('Note X WHERE NOT X ecrit_par P')
+        self.qexecute("INSERT Personne X: X nom 'bidule'")
+        self.qexecute("INSERT Note X: X type 'a'")
+        self.qexecute("INSERT Note X: X type 'b'")
+        self.qexecute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'")
+        rset = self.qexecute('Note X WHERE NOT X ecrit_par P')
         self.assertEqual(len(rset.rows), 1, rset.rows)

     def test_select_not_unlinked_multiple_solutions(self):
-        self.execute("INSERT Personne X: X nom 'bidule'")
-        self.execute("INSERT Note X: X type 'a'")
-        self.execute("INSERT Note X: X type 'b'")
-        self.execute("SET Y evaluee X WHERE X type 'a', Y nom 'bidule'")
-        rset = self.execute('Note X WHERE NOT Y evaluee X')
+        self.qexecute("INSERT Personne X: X nom 'bidule'")
+        self.qexecute("INSERT Note X: X type 'a'")
+        self.qexecute("INSERT Note X: X type 'b'")
+        self.qexecute("SET Y evaluee X WHERE X type 'a', Y nom 'bidule'")
+        rset = self.qexecute('Note X WHERE NOT Y evaluee X')
         self.assertEqual(len(rset.rows), 1, rset.rows)

     def test_select_date_extraction(self):
-        self.execute("INSERT Personne X: X nom 'foo', X datenaiss %(d)s",
+        self.qexecute("INSERT Personne X: X nom 'foo', X datenaiss %(d)s",
                      {'d': datetime(2001, 2,3, 12,13)})
         test_data = [('YEAR', 2001), ('MONTH', 2), ('DAY', 3),
                      ('HOUR', 12), ('MINUTE', 13), ('WEEKDAY', 6)]
         for funcname, result in test_data:
-            rset = self.execute('Any %s(D) WHERE X is Personne, X datenaiss D'
+            rset = self.qexecute('Any %s(D) WHERE X is Personne, X datenaiss D'
                                 % funcname)
             self.assertEqual(len(rset.rows), 1)
             self.assertEqual(rset.rows[0][0], result)
             self.assertEqual(rset.description, [('Int',)])

     def test_regexp_based_pattern_matching(self):
-        peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
-        peid2 = self.execute("INSERT Personne X: X nom 'cidule'")[0][0]
-        rset = self.execute('Any X WHERE X is Personne, X nom REGEXP "^b"')
+        peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0]
+        peid2 = self.qexecute("INSERT Personne X: X nom 'cidule'")[0][0]
+        rset = self.qexecute('Any X WHERE X is Personne, X nom REGEXP "^b"')
         self.assertEqual(len(rset.rows), 1, rset.rows)
         self.assertEqual(rset.rows[0][0], peid1)
-        rset = self.execute('Any X WHERE X is Personne, X nom REGEXP "idu"')
+        rset = self.qexecute('Any X WHERE X is Personne, X nom REGEXP "idu"')
         self.assertEqual(len(rset.rows), 2, rset.rows)

     def test_select_aggregat_count(self):
-        rset = self.execute('Any COUNT(X)')
+        rset = self.qexecute('Any COUNT(X)')
         self.assertEqual(len(rset.rows), 1)
         self.assertEqual(len(rset.rows[0]), 1)
         self.assertEqual(rset.description, [('Int',)])

     def test_select_aggregat_sum(self):
-        rset = self.execute('Any SUM(O) WHERE X ordernum O')
+        rset = self.qexecute('Any SUM(O) WHERE X ordernum O')
         self.assertEqual(len(rset.rows), 1)
         self.assertEqual(len(rset.rows[0]), 1)
         self.assertEqual(rset.description, [('Int',)])

     def test_select_aggregat_min(self):
-        rset = self.execute('Any MIN(X) WHERE X is Personne')
+        rset = self.qexecute('Any MIN(X) WHERE X is Personne')
         self.assertEqual(len(rset.rows), 1)
         self.assertEqual(len(rset.rows[0]), 1)
         self.assertEqual(rset.description, [('Personne',)])
-        rset = self.execute('Any MIN(O) WHERE X ordernum O')
+        rset = self.qexecute('Any MIN(O) WHERE X ordernum O')
         self.assertEqual(len(rset.rows), 1)
         self.assertEqual(len(rset.rows[0]), 1)
         self.assertEqual(rset.description, [('Int',)])

     def test_select_aggregat_max(self):
-        rset = self.execute('Any MAX(X) WHERE X is Personne')
+        rset = self.qexecute('Any MAX(X) WHERE X is Personne')
         self.assertEqual(len(rset.rows), 1)
         self.assertEqual(len(rset.rows[0]), 1)
         self.assertEqual(rset.description, [('Personne',)])
-        rset = self.execute('Any MAX(O) WHERE X ordernum O')
+        rset = self.qexecute('Any MAX(O) WHERE X ordernum O')
         self.assertEqual(len(rset.rows), 1)
         self.assertEqual(len(rset.rows[0]), 1)
         self.assertEqual(rset.description, [('Int',)])

     def test_select_custom_aggregat_concat_string(self):
-        rset = self.execute('Any GROUP_CONCAT(N) WHERE X is CWGroup, X name N')
+        rset = self.qexecute('Any GROUP_CONCAT(N) WHERE X is CWGroup, X name N')
         self.assertTrue(rset)
         self.assertEqual(sorted(rset[0][0].split(', ')), ['guests', 'managers',
                                                           'owners', 'users'])

     def test_select_custom_regproc_limit_size(self):
-        rset = self.execute('Any TEXT_LIMIT_SIZE(N, 3) WHERE X is CWGroup, X name N, X name "managers"')
+        rset = self.qexecute('Any TEXT_LIMIT_SIZE(N, 3) WHERE X is CWGroup, X name N, X name "managers"')
         self.assertTrue(rset)
         self.assertEqual(rset[0][0], 'man...')
-        self.execute("INSERT Basket X: X name 'bidule', X description 'hop hop', X description_format 'text/html'")
-        rset = self.execute('Any LIMIT_SIZE(D, DF, 3) WHERE X is Basket, X description D, X description_format DF')
+        self.qexecute("INSERT Basket X: X name 'bidule', X description 'hop hop', X description_format 'text/html'")
+        rset = self.qexecute('Any LIMIT_SIZE(D, DF, 3) WHERE X is Basket, X description D, X description_format DF')
         self.assertTrue(rset)
         self.assertEqual(rset[0][0], 'hop...')

     def test_select_regproc_orderby(self):
-        rset = self.execute('DISTINCT Any X,N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N, X name "managers"')
+        rset = self.qexecute('DISTINCT Any X,N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N, X name "managers"')
         self.assertEqual(len(rset), 1)
         self.assertEqual(rset[0][1], 'managers')
-        rset = self.execute('Any X,N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N, NOT U in_group X, U login "admin"')
+        rset = self.qexecute('Any X,N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N, NOT U in_group X, U login "admin"')
         self.assertEqual(len(rset), 3)
         self.assertEqual(rset[0][1], 'owners')

     def test_select_aggregat_sort(self):
-        rset = self.execute('Any G, COUNT(U) GROUPBY G ORDERBY 2 WHERE U in_group G')
+        rset = self.qexecute('Any G, COUNT(U) GROUPBY G ORDERBY 2 WHERE U in_group G')
         self.assertEqual(len(rset.rows), 2)
         self.assertEqual(len(rset.rows[0]), 2)
         self.assertEqual(rset.description[0], ('CWGroup', 'Int',))

     def test_select_aggregat_having(self):
-        rset = self.execute('Any N,COUNT(RDEF) GROUPBY N ORDERBY 2,N '
+        rset = self.qexecute('Any N,COUNT(RDEF) GROUPBY N ORDERBY 2,N '
                             'WHERE RT name N, RDEF relation_type RT '
                             'HAVING COUNT(RDEF) > 10')
         self.assertListEqual(rset.rows,
@@ -590,21 +618,21 @@

     def test_select_aggregat_having_dumb(self):
         # dumb but should not raise an error
-        rset = self.execute('Any U,COUNT(X) GROUPBY U '
+        rset = self.qexecute('Any U,COUNT(X) GROUPBY U '
                             'WHERE U eid %(x)s, X owned_by U '
                             'HAVING COUNT(X) > 10', {'x': self.ueid})
         self.assertEqual(len(rset.rows), 1)
         self.assertEqual(rset.rows[0][0], self.ueid)

     def test_select_having_non_aggregat_1(self):
-        rset = self.execute('Any L WHERE X login L, X creation_date CD '
+        rset = self.qexecute('Any L WHERE X login L, X creation_date CD '
                             'HAVING YEAR(CD) = %s' % date.today().year)
         self.assertListEqual(rset.rows,
                              [[u'admin'], [u'anon']])

     def test_select_having_non_aggregat_2(self):
-        rset = self.execute('Any L GROUPBY L WHERE X login L, X in_group G, '
+        rset = self.qexecute('Any L GROUPBY L WHERE X login L, X in_group G, '
                             'X creation_date CD HAVING YEAR(CD) = %s OR COUNT(G) > 1'
                             % date.today().year)
         self.assertListEqual(rset.rows,
@@ -613,226 +641,225 @@

     def test_select_complex_sort(self):
         """need sqlite including http://www.sqlite.org/cvstrac/tktview?tn=3773 fix"""
-        rset = self.execute('Any X ORDERBY X,D LIMIT 5 WHERE X creation_date D')
+        rset = self.qexecute('Any X ORDERBY X,D LIMIT 5 WHERE X creation_date D')
         result = rset.rows
         result.sort()
         self.assertEqual(tuplify(result), [(1,), (2,), (3,), (4,), (5,)])

     def test_select_upper(self):
-        rset = self.execute('Any X, UPPER(L) ORDERBY L WHERE X is CWUser, X login L')
+        rset = self.qexecute('Any X, UPPER(L) ORDERBY L WHERE X is CWUser, X login L')
         self.assertEqual(len(rset.rows), 2)
         self.assertEqual(rset.rows[0][1], 'ADMIN')
         self.assertEqual(rset.description[0], ('CWUser', 'String',))
         self.assertEqual(rset.rows[1][1], 'ANON')
         self.assertEqual(rset.description[1], ('CWUser', 'String',))
         eid = rset.rows[0][0]
-        rset = self.execute('Any UPPER(L) WHERE X eid %s, X login L'%eid)
+        rset = self.qexecute('Any UPPER(L) WHERE X eid %s, X login L'%eid)
         self.assertEqual(rset.rows[0][0], 'ADMIN')
         self.assertEqual(rset.description, [('String',)])

     def test_select_float_abs(self):
         # test positive number
-        eid = self.execute('INSERT Affaire A: A invoiced %(i)s', {'i': 1.2})[0][0]
-        rset = self.execute('Any ABS(I) WHERE X eid %(x)s, X invoiced I', {'x': eid})
+        eid = self.qexecute('INSERT Affaire A: A invoiced %(i)s', {'i': 1.2})[0][0]
+        rset = self.qexecute('Any ABS(I) WHERE X eid %(x)s, X invoiced I', {'x': eid})
         self.assertEqual(rset.rows[0][0], 1.2)
         # test negative number
-        eid = self.execute('INSERT Affaire A: A invoiced %(i)s', {'i': -1.2})[0][0]
-        rset = self.execute('Any ABS(I) WHERE X eid %(x)s, X invoiced I', {'x': eid})
+        eid = self.qexecute('INSERT Affaire A: A invoiced %(i)s', {'i': -1.2})[0][0]
+        rset = self.qexecute('Any ABS(I) WHERE X eid %(x)s, X invoiced I', {'x': eid})
         self.assertEqual(rset.rows[0][0], 1.2)

     def test_select_int_abs(self):
         # test positive number
-        eid = self.execute('INSERT Affaire A: A duration %(d)s', {'d': 12})[0][0]
-        rset = self.execute('Any ABS(D) WHERE X eid %(x)s, X duration D', {'x': eid})
+        eid = self.qexecute('INSERT Affaire A: A duration %(d)s', {'d': 12})[0][0]
+        rset = self.qexecute('Any ABS(D) WHERE X eid %(x)s, X duration D', {'x': eid})
         self.assertEqual(rset.rows[0][0], 12)
         # test negative number
-        eid = self.execute('INSERT Affaire A: A duration %(d)s', {'d': -12})[0][0]
-        rset = self.execute('Any ABS(D) WHERE X eid %(x)s, X duration D', {'x': eid})
+        eid = self.qexecute('INSERT Affaire A: A duration %(d)s', {'d': -12})[0][0]
+        rset = self.qexecute('Any ABS(D) WHERE X eid %(x)s, X duration D', {'x': eid})
         self.assertEqual(rset.rows[0][0], 12)

##     def test_select_simplified(self):
##         ueid = self.session.user.eid
-##         rset = self.execute('Any L WHERE %s login L'%ueid)
+##         rset = self.qexecute('Any L WHERE %s login L'%ueid)
##         self.assertEqual(rset.rows[0][0], 'admin')
-##         rset = self.execute('Any L WHERE %(x)s login L', {'x':ueid})
+##         rset = self.qexecute('Any L WHERE %(x)s login L', {'x':ueid})
##         self.assertEqual(rset.rows[0][0], 'admin')

     def test_select_searchable_text_1(self):
-        rset = self.execute(u"INSERT Personne X: X nom 'bidüle'")
-        rset = self.execute(u"INSERT Societe X: X nom 'bidüle'")
-        rset = self.execute("INSERT Societe X: X nom 'chouette'")
-        self.commit()
-        rset = self.execute('Any X where X has_text %(text)s', {'text': u'bidüle'})
+        rset = self.qexecute(u"INSERT Personne X: X nom 'bidüle'")
+        rset = self.qexecute(u"INSERT Societe X: X nom 'bidüle'")
+        rset = self.qexecute("INSERT Societe X: X nom 'chouette'")
+        rset = self.qexecute('Any X where X has_text %(text)s', {'text': u'bidüle'})
         self.assertEqual(len(rset.rows), 2, rset.rows)
-        rset = self.execute(u'Any N where N has_text "bidüle"')
+        rset = self.qexecute(u'Any N where N has_text "bidüle"')
         self.assertEqual(len(rset.rows), 2, rset.rows)
         biduleeids = [r[0] for r in rset.rows]
-        rset = self.execute(u'Any N where NOT N has_text "bidüle"')
+        rset = self.qexecute(u'Any N where NOT N has_text "bidüle"')
         self.assertFalse([r[0] for r in rset.rows if r[0] in biduleeids])
         # duh?
-        rset = self.execute('Any X WHERE X has_text %(text)s', {'text': u'ça'})
+        rset = self.qexecute('Any X WHERE X has_text %(text)s', {'text': u'ça'})

     def test_select_searchable_text_2(self):
-        rset = self.execute("INSERT Personne X: X nom 'bidule'")
-        rset = self.execute("INSERT Personne X: X nom 'chouette'")
-        rset = self.execute("INSERT Societe X: X nom 'bidule'")
-        self.commit()
-        rset = self.execute('Personne N where N has_text "bidule"')
+        rset = self.qexecute("INSERT Personne X: X nom 'bidule'")
+        rset = self.qexecute("INSERT Personne X: X nom 'chouette'")
+        rset = self.qexecute("INSERT Societe X: X nom 'bidule'")
+        rset = self.qexecute('Personne N where N has_text "bidule"')
         self.assertEqual(len(rset.rows), 1, rset.rows)

     def test_select_searchable_text_3(self):
-        rset = self.execute("INSERT Personne X: X nom 'bidule', X sexe 'M'")
-        rset = self.execute("INSERT Personne X: X nom 'bidule', X sexe 'F'")
-        rset = self.execute("INSERT Societe X: X nom 'bidule'")
-        self.commit()
-        rset = self.execute('Any X where X has_text "bidule" and X sexe "M"')
+        rset = self.qexecute("INSERT Personne X: X nom 'bidule', X sexe 'M'")
+        rset = self.qexecute("INSERT Personne X: X nom 'bidule', X sexe 'F'")
+        rset = self.qexecute("INSERT Societe X: X nom 'bidule'")
+        rset = self.qexecute('Any X where X has_text "bidule" and X sexe "M"')
         self.assertEqual(len(rset.rows), 1, rset.rows)

     def test_select_multiple_searchable_text(self):
-        self.execute(u"INSERT Personne X: X nom 'bidüle'")
-        self.execute("INSERT Societe X: X nom 'chouette', S travaille X")
-        self.execute(u"INSERT Personne X: X nom 'bidüle'")
-        self.commit()
-        rset = self.execute('Personne X WHERE X has_text %(text)s, X travaille S, S has_text %(text2)s',
+        self.qexecute(u"INSERT Personne X: X nom 'bidüle'")
+        self.qexecute("INSERT Societe X: X nom 'chouette', S travaille X")
+        self.qexecute(u"INSERT Personne X: X nom 'bidüle'")
+        rset = self.qexecute('Personne X WHERE X has_text %(text)s, X travaille S, S has_text %(text2)s',
                              {'text': u'bidüle',
                               'text2': u'chouette',}
                              )
         self.assertEqual(len(rset.rows), 1, rset.rows)
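The searchable-text tests above rely on the has_text pseudo-relation, which
queries the full-text index rather than any schema relation. A hypothetical
companion test in the same style, hedged on the assumption that the index
only sees committed entities::

    def test_has_text_sketch(self):
        # hypothetical test showing the full-text pseudo-relation used above
        with self.admin_access.repo_cnx() as cnx:
            cnx.execute(u"INSERT Personne X: X nom 'bidule'")
            cnx.execute(u"INSERT Societe X: X nom 'bidule'")
            cnx.commit()  # let the full-text index see the new entities
            # has_text matches any indexed entity containing the word
            self.assertEqual(len(cnx.execute(u'Any X WHERE X has_text "bidule"')), 2)
            # restricting the subject type narrows the match
            self.assertEqual(len(cnx.execute(u'Personne X WHERE X has_text "bidule"')), 1)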
self.assertEqual(tuplify(rset.rows), [(2,), (3,), (4,), (5,)]) self.assertEqual(rset.description, ()) def test_select_limit_offset(self): - rset = self.execute('CWGroup X ORDERBY N LIMIT 2 WHERE X name N') + rset = self.qexecute('CWGroup X ORDERBY N LIMIT 2 WHERE X name N') self.assertEqual(tuplify(rset.rows), [(2,), (3,)]) self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',)]) - rset = self.execute('CWGroup X ORDERBY N LIMIT 2 OFFSET 2 WHERE X name N') + rset = self.qexecute('CWGroup X ORDERBY N LIMIT 2 OFFSET 2 WHERE X name N') self.assertEqual(tuplify(rset.rows), [(4,), (5,)]) def test_select_symmetric(self): - self.execute("INSERT Personne X: X nom 'machin'") - self.execute("INSERT Personne X: X nom 'bidule'") - self.execute("INSERT Personne X: X nom 'chouette'") - self.execute("INSERT Personne X: X nom 'trucmuche'") - self.execute("SET X connait Y WHERE X nom 'chouette', Y nom 'bidule'") - self.execute("SET X connait Y WHERE X nom 'machin', Y nom 'chouette'") - rset = self.execute('Any P WHERE P connait P2') + self.qexecute("INSERT Personne X: X nom 'machin'") + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Personne X: X nom 'chouette'") + self.qexecute("INSERT Personne X: X nom 'trucmuche'") + self.qexecute("SET X connait Y WHERE X nom 'chouette', Y nom 'bidule'") + self.qexecute("SET X connait Y WHERE X nom 'machin', Y nom 'chouette'") + rset = self.qexecute('Any P WHERE P connait P2') self.assertEqual(len(rset.rows), 4, rset.rows) - rset = self.execute('Any P WHERE NOT P connait P2') + rset = self.qexecute('Any P WHERE NOT P connait P2') self.assertEqual(len(rset.rows), 1, rset.rows) # trucmuche - rset = self.execute('Any P WHERE P connait P2, P2 nom "bidule"') + rset = self.qexecute('Any P WHERE P connait P2, P2 nom "bidule"') self.assertEqual(len(rset.rows), 1, rset.rows) - rset = self.execute('Any P WHERE P2 connait P, P2 nom "bidule"') + rset = self.qexecute('Any P WHERE P2 connait P, P2 nom "bidule"') self.assertEqual(len(rset.rows), 1, rset.rows) - rset = self.execute('Any P WHERE P connait P2, P2 nom "chouette"') + rset = self.qexecute('Any P WHERE P connait P2, P2 nom "chouette"') self.assertEqual(len(rset.rows), 2, rset.rows) - rset = self.execute('Any P WHERE P2 connait P, P2 nom "chouette"') + rset = self.qexecute('Any P WHERE P2 connait P, P2 nom "chouette"') self.assertEqual(len(rset.rows), 2, rset.rows) def test_select_inline(self): - self.execute("INSERT Personne X: X nom 'bidule'") - self.execute("INSERT Note X: X type 'a'") - self.execute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'") - rset = self.execute('Any N where N ecrit_par X, X nom "bidule"') + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Note X: X type 'a'") + self.qexecute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'") + rset = self.qexecute('Any N where N ecrit_par X, X nom "bidule"') self.assertEqual(len(rset.rows), 1, rset.rows) def test_select_creation_date(self): - self.execute("INSERT Personne X: X nom 'bidule'") - rset = self.execute('Any D WHERE X nom "bidule", X creation_date D') + self.qexecute("INSERT Personne X: X nom 'bidule'") + rset = self.qexecute('Any D WHERE X nom "bidule", X creation_date D') self.assertEqual(len(rset.rows), 1) def test_select_or_relation(self): - self.execute("INSERT Personne X: X nom 'bidule'") - self.execute("INSERT Personne X: X nom 'chouette'") - self.execute("INSERT Societe X: X nom 'logilab'") - self.execute("INSERT Societe X: X nom 'caesium'") - self.execute("SET P travaille S 
WHERE P nom 'bidule', S nom 'logilab'") - rset = self.execute('DISTINCT Any P WHERE P travaille S1 OR P travaille S2, S1 nom "logilab", S2 nom "caesium"') + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Personne X: X nom 'chouette'") + self.qexecute("INSERT Societe X: X nom 'logilab'") + self.qexecute("INSERT Societe X: X nom 'caesium'") + self.qexecute("SET P travaille S WHERE P nom 'bidule', S nom 'logilab'") + rset = self.qexecute('DISTINCT Any P WHERE P travaille S1 OR P travaille S2, ' + 'S1 nom "logilab", S2 nom "caesium"') self.assertEqual(len(rset.rows), 1) - self.execute("SET P travaille S WHERE P nom 'chouette', S nom 'caesium'") - rset = self.execute('DISTINCT Any P WHERE P travaille S1 OR P travaille S2, S1 nom "logilab", S2 nom "caesium"') + self.qexecute("SET P travaille S WHERE P nom 'chouette', S nom 'caesium'") + rset = self.qexecute('DISTINCT Any P WHERE P travaille S1 OR P travaille S2, ' + 'S1 nom "logilab", S2 nom "caesium"') self.assertEqual(len(rset.rows), 2) def test_select_or_sym_relation(self): - self.execute("INSERT Personne X: X nom 'bidule'") - self.execute("INSERT Personne X: X nom 'chouette'") - self.execute("INSERT Personne X: X nom 'truc'") - self.execute("SET P connait S WHERE P nom 'bidule', S nom 'chouette'") - rset = self.execute('DISTINCT Any P WHERE S connait P, S nom "chouette"') + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Personne X: X nom 'chouette'") + self.qexecute("INSERT Personne X: X nom 'truc'") + self.qexecute("SET P connait S WHERE P nom 'bidule', S nom 'chouette'") + rset = self.qexecute('DISTINCT Any P WHERE S connait P, S nom "chouette"') self.assertEqual(len(rset.rows), 1, rset.rows) - rset = self.execute('DISTINCT Any P WHERE P connait S or S connait P, S nom "chouette"') + rset = self.qexecute('DISTINCT Any P WHERE P connait S or S connait P, S nom "chouette"') self.assertEqual(len(rset.rows), 1, rset.rows) - self.execute("SET P connait S WHERE P nom 'chouette', S nom 'truc'") - rset = self.execute('DISTINCT Any P WHERE S connait P, S nom "chouette"') + self.qexecute("SET P connait S WHERE P nom 'chouette', S nom 'truc'") + rset = self.qexecute('DISTINCT Any P WHERE S connait P, S nom "chouette"') self.assertEqual(len(rset.rows), 2, rset.rows) - rset = self.execute('DISTINCT Any P WHERE P connait S OR S connait P, S nom "chouette"') + rset = self.qexecute('DISTINCT Any P WHERE P connait S OR S connait P, S nom "chouette"') self.assertEqual(len(rset.rows), 2, rset.rows) def test_select_follow_relation(self): - self.execute("INSERT Affaire X: X sujet 'cool'") - self.execute("INSERT Societe X: X nom 'chouette'") - self.execute("SET A concerne S WHERE A is Affaire, S is Societe") - self.execute("INSERT Note X: X para 'truc'") - self.execute("SET S evaluee N WHERE S is Societe, N is Note") - self.execute("INSERT Societe X: X nom 'bidule'") - self.execute("INSERT Note X: X para 'troc'") - self.execute("SET S evaluee N WHERE S nom 'bidule', N para 'troc'") - rset = self.execute('DISTINCT Any A,N WHERE A concerne S, S evaluee N') + self.qexecute("INSERT Affaire X: X sujet 'cool'") + self.qexecute("INSERT Societe X: X nom 'chouette'") + self.qexecute("SET A concerne S WHERE A is Affaire, S is Societe") + self.qexecute("INSERT Note X: X para 'truc'") + self.qexecute("SET S evaluee N WHERE S is Societe, N is Note") + self.qexecute("INSERT Societe X: X nom 'bidule'") + self.qexecute("INSERT Note X: X para 'troc'") + self.qexecute("SET S evaluee N WHERE S nom 'bidule', N para 'troc'") + 
rset = self.qexecute('DISTINCT Any A,N WHERE A concerne S, S evaluee N') self.assertEqual(len(rset.rows), 1, rset.rows) def test_select_ordered_distinct_1(self): self.assertRaises(BadRQLQuery, - self.execute, 'DISTINCT Any S ORDERBY R WHERE A is Affaire, A sujet S, A ref R') + self.qexecute, 'DISTINCT Any S ORDERBY R WHERE A is Affaire, A sujet S, A ref R') def test_select_ordered_distinct_2(self): - self.execute("INSERT Affaire X: X sujet 'minor'") - self.execute("INSERT Affaire X: X sujet 'zou'") - self.execute("INSERT Affaire X: X sujet 'abcd'") - rset = self.execute('DISTINCT Any S ORDERBY S WHERE A is Affaire, A sujet S') + self.qexecute("INSERT Affaire X: X sujet 'minor'") + self.qexecute("INSERT Affaire X: X sujet 'zou'") + self.qexecute("INSERT Affaire X: X sujet 'abcd'") + rset = self.qexecute('DISTINCT Any S ORDERBY S WHERE A is Affaire, A sujet S') self.assertEqual(rset.rows, [['abcd'], ['minor'], ['zou']]) def test_select_ordered_distinct_3(self): - rset = self.execute('DISTINCT Any N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N') + rset = self.qexecute('DISTINCT Any N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N') self.assertEqual(rset.rows, [['owners'], ['guests'], ['users'], ['managers']]) def test_select_or_value(self): - rset = self.execute('Any U WHERE U in_group G, G name "owners" OR G name "users"') + rset = self.qexecute('Any U WHERE U in_group G, G name "owners" OR G name "users"') self.assertEqual(len(rset.rows), 0) - rset = self.execute('Any U WHERE U in_group G, G name "guests" OR G name "managers"') + rset = self.qexecute('Any U WHERE U in_group G, G name "guests" OR G name "managers"') self.assertEqual(len(rset.rows), 2) def test_select_explicit_eid(self): - rset = self.execute('Any X,E WHERE X owned_by U, X eid E, U eid %(u)s', {'u': self.session.user.eid}) + rset = self.qexecute('Any X,E WHERE X owned_by U, X eid E, U eid %(u)s', + {'u': self.session.user.eid}) self.assertTrue(rset) self.assertEqual(rset.description[0][1], 'Int') # def test_select_rewritten_optional(self): -# eid = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0] -# rset = self.execute('Any X WHERE X eid %(x)s, EXISTS(X owned_by U) OR EXISTS(X concerne S?, S owned_by U)', +# eid = self.qexecute("INSERT Affaire X: X sujet 'cool'")[0][0] +# rset = self.qexecute('Any X WHERE X eid %(x)s, EXISTS(X owned_by U) OR EXISTS(X concerne S?, S owned_by U)', # {'x': eid}, 'x') # self.assertEqual(rset.rows, [[eid]]) def test_today_bug(self): - self.execute("INSERT Tag X: X name 'bidule', X creation_date NOW") - self.execute("INSERT Tag Y: Y name 'toto'") - rset = self.execute("Any D WHERE X name in ('bidule', 'toto') , X creation_date D") + self.qexecute("INSERT Tag X: X name 'bidule', X creation_date NOW") + self.qexecute("INSERT Tag Y: Y name 'toto'") + rset = self.qexecute("Any D WHERE X name in ('bidule', 'toto') , X creation_date D") self.assertIsInstance(rset.rows[0][0], datetime) - rset = self.execute('Tag X WHERE X creation_date TODAY') + rset = self.qexecute('Tag X WHERE X creation_date TODAY') self.assertEqual(len(rset.rows), 2) - rset = self.execute('Any MAX(D) WHERE X is Tag, X creation_date D') + rset = self.qexecute('Any MAX(D) WHERE X is Tag, X creation_date D') self.assertIsInstance(rset[0][0], datetime) def test_today(self): - self.execute("INSERT Tag X: X name 'bidule', X creation_date TODAY") - self.execute("INSERT Tag Y: Y name 'toto'") - rset = self.execute('Tag X WHERE X creation_date TODAY') + self.qexecute("INSERT Tag X: X name 'bidule', X creation_date 
TODAY") + self.qexecute("INSERT Tag Y: Y name 'toto'") + rset = self.qexecute('Tag X WHERE X creation_date TODAY') self.assertEqual(len(rset.rows), 2) def test_select_boolean(self): - rset = self.execute('Any N WHERE X is CWEType, X name N, X final %(val)s', + rset = self.qexecute('Any N WHERE X is CWEType, X name N, X final %(val)s', {'val': True}) self.assertEqual(sorted(r[0] for r in rset.rows), ['BigInt', 'Boolean', 'Bytes', 'Date', 'Datetime', @@ -841,7 +868,7 @@ 'Password', 'String', 'TZDatetime', 'TZTime', 'Time']) - rset = self.execute('Any N WHERE X is CWEType, X name N, X final TRUE') + rset = self.qexecute('Any N WHERE X is CWEType, X name N, X final TRUE') self.assertEqual(sorted(r[0] for r in rset.rows), ['BigInt', 'Boolean', 'Bytes', 'Date', 'Datetime', 'Decimal', 'Float', @@ -849,32 +876,33 @@ 'Password', 'String', 'TZDatetime', 'TZTime', 'Time']) - req = self.session - req.create_entity('Personne', nom=u'louis', test=True) - self.assertEqual(len(req.execute('Any X WHERE X test %(val)s', {'val': True})), 1) - self.assertEqual(len(req.execute('Any X WHERE X test TRUE')), 1) - self.assertEqual(len(req.execute('Any X WHERE X test %(val)s', {'val': False})), 0) - self.assertEqual(len(req.execute('Any X WHERE X test FALSE')), 0) + with self.session.new_cnx() as cnx: + cnx.create_entity('Personne', nom=u'louis', test=True) + self.assertEqual(len(cnx.execute('Any X WHERE X test %(val)s', {'val': True})), 1) + self.assertEqual(len(cnx.execute('Any X WHERE X test TRUE')), 1) + self.assertEqual(len(cnx.execute('Any X WHERE X test %(val)s', {'val': False})), 0) + self.assertEqual(len(cnx.execute('Any X WHERE X test FALSE')), 0) def test_select_constant(self): - rset = self.execute('Any X, "toto" ORDERBY X WHERE X is CWGroup') + rset = self.qexecute('Any X, "toto" ORDERBY X WHERE X is CWGroup') self.assertEqual(rset.rows, map(list, zip((2,3,4,5), ('toto','toto','toto','toto',)))) self.assertIsInstance(rset[0][1], unicode) self.assertEqual(rset.description, zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'), ('String', 'String', 'String', 'String',))) - rset = self.execute('Any X, %(value)s ORDERBY X WHERE X is CWGroup', {'value': 'toto'}) + rset = self.qexecute('Any X, %(value)s ORDERBY X WHERE X is CWGroup', {'value': 'toto'}) self.assertEqual(rset.rows, map(list, zip((2,3,4,5), ('toto','toto','toto','toto',)))) self.assertIsInstance(rset[0][1], unicode) self.assertEqual(rset.description, zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'), ('String', 'String', 'String', 'String',))) - rset = self.execute('Any X,GN WHERE X is CWUser, G is CWGroup, X login "syt", X in_group G, G name GN') + rset = self.qexecute('Any X,GN WHERE X is CWUser, G is CWGroup, X login "syt", ' + 'X in_group G, G name GN') def test_select_union(self): - rset = self.execute('Any X,N ORDERBY N WITH X,N BEING ' + rset = self.qexecute('Any X,N ORDERBY N WITH X,N BEING ' '((Any X,N WHERE X name N, X transition_of WF, WF workflow_of E, E name %(name)s)' ' UNION ' '(Any X,N WHERE X name N, X state_of WF, WF workflow_of E, E name %(name)s))', @@ -888,23 +916,26 @@ def test_select_union_aggregat(self): # meaningless, the goal in to have group by done on different attribute # for each sub-query - self.execute('(Any N,COUNT(X) GROUPBY N WHERE X name N, X is State)' + self.qexecute('(Any N,COUNT(X) GROUPBY N WHERE X name N, X is State)' ' UNION ' '(Any N,COUNT(X) GROUPBY N ORDERBY 2 WHERE X login N)') def test_select_union_aggregat_independant_group(self): - self.execute('INSERT State X: X name "hop"') - self.execute('INSERT 
State X: X name "hop"') - self.execute('INSERT Transition X: X name "hop"') - self.execute('INSERT Transition X: X name "hop"') - rset = self.execute('Any N,NX ORDERBY 2 WITH N,NX BEING ' - '((Any N,COUNT(X) GROUPBY N WHERE X name N, X is State HAVING COUNT(X)>1)' - ' UNION ' - '(Any N,COUNT(X) GROUPBY N WHERE X name N, X is Transition HAVING COUNT(X)>1))') - self.assertEqual(rset.rows, [[u'hop', 2], [u'hop', 2]]) + with self.session.new_cnx() as cnx: + cnx.execute('INSERT State X: X name "hop"') + cnx.execute('INSERT State X: X name "hop"') + cnx.execute('INSERT Transition X: X name "hop"') + cnx.execute('INSERT Transition X: X name "hop"') + rset = cnx.execute('Any N,NX ORDERBY 2 WITH N,NX BEING ' + '((Any N,COUNT(X) GROUPBY N WHERE X name N, ' + ' X is State HAVING COUNT(X)>1)' + ' UNION ' + '(Any N,COUNT(X) GROUPBY N WHERE X name N, ' + ' X is Transition HAVING COUNT(X)>1))') + self.assertEqual(rset.rows, [[u'hop', 2], [u'hop', 2]]) def test_select_union_selection_with_diff_variables(self): - rset = self.execute('(Any N WHERE X name N, X is State)' + rset = self.qexecute('(Any N WHERE X name N, X is State)' ' UNION ' '(Any NN WHERE XX name NN, XX is Transition)') self.assertEqual(sorted(r[0] for r in rset.rows), @@ -914,51 +945,51 @@ 'start', 'todo']) def test_select_union_description_diff_var(self): - eid1 = self.execute('CWGroup X WHERE X name "managers"')[0][0] - eid2 = self.execute('CWUser X WHERE X login "admin"')[0][0] - rset = self.execute('(Any X WHERE X eid %(x)s)' + eid1 = self.qexecute('CWGroup X WHERE X name "managers"')[0][0] + eid2 = self.qexecute('CWUser X WHERE X login "admin"')[0][0] + rset = self.qexecute('(Any X WHERE X eid %(x)s)' ' UNION ' '(Any Y WHERE Y eid %(y)s)', {'x': eid1, 'y': eid2}) self.assertEqual(rset.description[:], [('CWGroup',), ('CWUser',)]) def test_exists(self): - geid = self.execute("INSERT CWGroup X: X name 'lulufanclub'")[0][0] - self.execute("SET U in_group G WHERE G name 'lulufanclub'") - peid = self.execute("INSERT Personne X: X prenom 'lulu', X nom 'petit'")[0][0] - rset = self.execute("Any X WHERE X prenom 'lulu'," + geid = self.qexecute("INSERT CWGroup X: X name 'lulufanclub'")[0][0] + self.qexecute("SET U in_group G WHERE G name 'lulufanclub'") + peid = self.qexecute("INSERT Personne X: X prenom 'lulu', X nom 'petit'")[0][0] + rset = self.qexecute("Any X WHERE X prenom 'lulu'," "EXISTS (U in_group G, G name 'lulufanclub' OR G name 'managers');") self.assertEqual(rset.rows, [[peid]]) def test_identity(self): - eid = self.execute('Any X WHERE X identity Y, Y eid 1')[0][0] + eid = self.qexecute('Any X WHERE X identity Y, Y eid 1')[0][0] self.assertEqual(eid, 1) - eid = self.execute('Any X WHERE Y identity X, Y eid 1')[0][0] + eid = self.qexecute('Any X WHERE Y identity X, Y eid 1')[0][0] self.assertEqual(eid, 1) - login = self.execute('Any L WHERE X login "admin", X identity Y, Y login L')[0][0] + login = self.qexecute('Any L WHERE X login "admin", X identity Y, Y login L')[0][0] self.assertEqual(login, 'admin') def test_select_date_mathexp(self): - rset = self.execute('Any X, TODAY - CD WHERE X is CWUser, X creation_date CD') + rset = self.qexecute('Any X, TODAY - CD WHERE X is CWUser, X creation_date CD') self.assertTrue(rset) self.assertEqual(rset.description[0][1], 'Interval') - eid, = self.execute("INSERT Personne X: X nom 'bidule'")[0] - rset = self.execute('Any X, NOW - CD WHERE X is Personne, X creation_date CD') + eid, = self.qexecute("INSERT Personne X: X nom 'bidule'")[0] + rset = self.qexecute('Any X, NOW - CD WHERE X is 
Personne, X creation_date CD') self.assertEqual(rset.description[0][1], 'Interval') def test_select_subquery_aggregat_1(self): # percent users by groups - self.execute('SET X in_group G WHERE G name "users"') - rset = self.execute('Any GN, COUNT(X)*100/T GROUPBY GN ORDERBY 2,1' + self.qexecute('SET X in_group G WHERE G name "users"') + rset = self.qexecute('Any GN, COUNT(X)*100/T GROUPBY GN ORDERBY 2,1' ' WHERE G name GN, X in_group G' ' WITH T BEING (Any COUNT(U) WHERE U is CWUser)') self.assertEqual(rset.rows, [[u'guests', 50], [u'managers', 50], [u'users', 100]]) self.assertEqual(rset.description, [('String', 'Int'), ('String', 'Int'), ('String', 'Int')]) def test_select_subquery_aggregat_2(self): - expected = self.execute('Any X, 0, COUNT(T) GROUPBY X ' + expected = self.qexecute('Any X, 0, COUNT(T) GROUPBY X ' 'WHERE X is Workflow, T transition_of X').rows - rset = self.execute(''' + rset = self.qexecute(''' Any P1,B,E WHERE P1 identity P2 WITH P1,B BEING (Any P,COUNT(T) GROUPBY P WHERE P is Workflow, T is Transition, T? transition_of P, T type "auto"), @@ -967,116 +998,120 @@ self.assertEqual(sorted(rset.rows), sorted(expected)) def test_select_subquery_const(self): - rset = self.execute('Any X WITH X BEING ((Any NULL) UNION (Any "toto"))') + rset = self.qexecute('Any X WITH X BEING ((Any NULL) UNION (Any "toto"))') self.assertEqual(rset.rows, [[None], ['toto']]) self.assertEqual(rset.description, [(None,), ('String',)]) # insertion queries tests ################################################# def test_insert_is(self): - eid, = self.execute("INSERT Personne X: X nom 'bidule'")[0] - etype, = self.execute("Any TN WHERE X is T, X eid %s, T name TN" % eid)[0] + eid, = self.qexecute("INSERT Personne X: X nom 'bidule'")[0] + etype, = self.qexecute("Any TN WHERE X is T, X eid %s, T name TN" % eid)[0] self.assertEqual(etype, 'Personne') - self.execute("INSERT Personne X: X nom 'managers'") + self.qexecute("INSERT Personne X: X nom 'managers'") def test_insert_1(self): - rset = self.execute("INSERT Personne X: X nom 'bidule'") + rset = self.qexecute("INSERT Personne X: X nom 'bidule'") self.assertEqual(len(rset.rows), 1) self.assertEqual(rset.description, [('Personne',)]) - rset = self.execute('Personne X WHERE X nom "bidule"') + rset = self.qexecute('Personne X WHERE X nom "bidule"') self.assert_(rset.rows) self.assertEqual(rset.description, [('Personne',)]) def test_insert_1_multiple(self): - self.execute("INSERT Personne X: X nom 'bidule'") - self.execute("INSERT Personne X: X nom 'chouette'") - rset = self.execute("INSERT Societe Y: Y nom N, P travaille Y WHERE P nom N") + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Personne X: X nom 'chouette'") + rset = self.qexecute("INSERT Societe Y: Y nom N, P travaille Y WHERE P nom N") self.assertEqual(len(rset.rows), 2) self.assertEqual(rset.description, [('Societe',), ('Societe',)]) def test_insert_2(self): - rset = self.execute("INSERT Personne X, Personne Y: X nom 'bidule', Y nom 'tutu'") + rset = self.qexecute("INSERT Personne X, Personne Y: X nom 'bidule', Y nom 'tutu'") self.assertEqual(rset.description, [('Personne', 'Personne')]) - rset = self.execute('Personne X WHERE X nom "bidule" or X nom "tutu"') + rset = self.qexecute('Personne X WHERE X nom "bidule" or X nom "tutu"') self.assert_(rset.rows) self.assertEqual(rset.description, [('Personne',), ('Personne',)]) def test_insert_3(self): - self.execute("INSERT Personne X: X nom Y WHERE U login 'admin', U login Y") - rset = self.execute('Personne X WHERE X 
nom "admin"') + self.qexecute("INSERT Personne X: X nom Y WHERE U login 'admin', U login Y") + rset = self.qexecute('Personne X WHERE X nom "admin"') self.assert_(rset.rows) self.assertEqual(rset.description, [('Personne',)]) def test_insert_4(self): - self.execute("INSERT Societe Y: Y nom 'toto'") - self.execute("INSERT Personne X: X nom 'bidule', X travaille Y WHERE Y nom 'toto'") - rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') + self.qexecute("INSERT Societe Y: Y nom 'toto'") + self.qexecute("INSERT Personne X: X nom 'bidule', X travaille Y WHERE Y nom 'toto'") + rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') self.assert_(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_4bis(self): - peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] - seid = self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", + peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + seid = self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", {'x': str(peid)})[0][0] - self.assertEqual(len(self.execute('Any X, Y WHERE X travaille Y')), 1) - self.execute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s", + self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 1) + self.qexecute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s", {'x': str(seid)}) - self.assertEqual(len(self.execute('Any X, Y WHERE X travaille Y')), 2) + self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 2) def test_insert_4ter(self): - peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] - seid = self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", + peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + seid = self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", {'x': unicode(peid)})[0][0] - self.assertEqual(len(self.execute('Any X, Y WHERE X travaille Y')), 1) - self.execute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s", + self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 1) + self.qexecute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s", {'x': unicode(seid)}) - self.assertEqual(len(self.execute('Any X, Y WHERE X travaille Y')), 2) + self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 2) def test_insert_5(self): - self.execute("INSERT Personne X: X nom 'bidule'") - self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X nom 'bidule'") - rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') + self.qexecute("INSERT Personne X: X nom 'bidule'") + self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X nom 'bidule'") + rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') self.assert_(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_5bis(self): - peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] - self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", + peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", {'x': peid}) - rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') + rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') 
self.assert_(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_6(self): - self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto', X travaille Y") - rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') + self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto', X travaille Y") + rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') self.assert_(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_7(self): - self.execute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y WHERE U login 'admin', U login N") - rset = self.execute('Any X, Y WHERE X nom "admin", Y nom "toto", X travaille Y') + self.qexecute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', " + "X travaille Y WHERE U login 'admin', U login N") + rset = self.qexecute('Any X, Y WHERE X nom "admin", Y nom "toto", X travaille Y') self.assert_(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_7_2(self): - self.execute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y WHERE U login N") - rset = self.execute('Any X, Y WHERE Y nom "toto", X travaille Y') + self.qexecute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', " + "X travaille Y WHERE U login N") + rset = self.qexecute('Any X, Y WHERE Y nom "toto", X travaille Y') self.assertEqual(len(rset), 2) self.assertEqual(rset.description, [('Personne', 'Societe',), ('Personne', 'Societe',)]) def test_insert_8(self): - self.execute("INSERT Societe Y, Personne X: Y nom N, X nom 'toto', X travaille Y WHERE U login 'admin', U login N") - rset = self.execute('Any X, Y WHERE X nom "toto", Y nom "admin", X travaille Y') + self.qexecute("INSERT Societe Y, Personne X: Y nom N, X nom 'toto', X travaille Y " + "WHERE U login 'admin', U login N") + rset = self.qexecute('Any X, Y WHERE X nom "toto", Y nom "admin", X travaille Y') self.assert_(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_9(self): - self.execute("INSERT Societe X: X nom 'Lo'") - self.execute("INSERT Societe X: X nom 'Gi'") - self.execute("INSERT SubDivision X: X nom 'Lab'") - rset = self.execute("INSERT Personne X: X nom N, X travaille Y, X travaille_subdivision Z WHERE Y is Societe, Z is SubDivision, Y nom N") + self.qexecute("INSERT Societe X: X nom 'Lo'") + self.qexecute("INSERT Societe X: X nom 'Gi'") + self.qexecute("INSERT SubDivision X: X nom 'Lab'") + rset = self.qexecute("INSERT Personne X: X nom N, X travaille Y, X travaille_subdivision Z " + "WHERE Y is Societe, Z is SubDivision, Y nom N") self.assertEqual(len(rset), 2) self.assertEqual(rset.description, [('Personne',), ('Personne',)]) # self.assertSetEqual(set(x.nom for x in rset.entities()), @@ -1088,21 +1123,21 @@ def test_insert_query_error(self): self.assertRaises(Exception, - self.execute, + self.qexecute, "INSERT Personne X: X nom 'toto', X is Personne") self.assertRaises(Exception, - self.execute, + self.qexecute, "INSERT Personne X: X nom 'toto', X is_instance_of Personne") self.assertRaises(QueryError, - self.execute, + self.qexecute, "INSERT Personne X: X nom 'toto', X has_text 'tutu'") self.assertRaises(QueryError, - self.execute, + self.qexecute, "INSERT CWUser X: X login 'toto', X eid %s" % cnx.user(self.session).eid) def test_insertion_description_with_where(self): - rset = self.execute('INSERT CWUser E, EmailAddress EM: E login "X", E upassword "X", ' + rset = 
self.qexecute('INSERT CWUser E, EmailAddress EM: E login "X", E upassword "X", ' 'E primary_email EM, EM address "X", E in_group G ' 'WHERE G name "managers"') self.assertEqual(list(rset.description[0]), ['CWUser', 'EmailAddress']) @@ -1110,54 +1145,58 @@ # deletion queries tests ################################################## def test_delete_1(self): - self.execute("INSERT Personne Y: Y nom 'toto'") - rset = self.execute('Personne X WHERE X nom "toto"') + self.qexecute("INSERT Personne Y: Y nom 'toto'") + rset = self.qexecute('Personne X WHERE X nom "toto"') self.assertEqual(len(rset.rows), 1) - drset = self.execute("DELETE Personne Y WHERE Y nom 'toto'") + drset = self.qexecute("DELETE Personne Y WHERE Y nom 'toto'") self.assertEqual(drset.rows, rset.rows) - rset = self.execute('Personne X WHERE X nom "toto"') + rset = self.qexecute('Personne X WHERE X nom "toto"') self.assertEqual(len(rset.rows), 0) def test_delete_2(self): - rset = self.execute("INSERT Personne X, Personne Y, Societe Z : X nom 'syt', Y nom 'adim', Z nom 'Logilab', X travaille Z, Y travaille Z") + rset = self.qexecute("INSERT Personne X, Personne Y, Societe Z : " + "X nom 'syt', Y nom 'adim', Z nom 'Logilab', X travaille Z, Y travaille Z") self.assertEqual(len(rset), 1) self.assertEqual(len(rset[0]), 3) self.assertEqual(rset.description[0], ('Personne', 'Personne', 'Societe')) - self.assertEqual(self.execute('Any N WHERE X nom N, X eid %s'% rset[0][0])[0][0], 'syt') - rset = self.execute('Personne X WHERE X travaille Y, Y nom "Logilab"') + self.assertEqual(self.qexecute('Any N WHERE X nom N, X eid %s'% rset[0][0])[0][0], 'syt') + rset = self.qexecute('Personne X WHERE X travaille Y, Y nom "Logilab"') self.assertEqual(len(rset.rows), 2, rset.rows) - self.execute("DELETE X travaille Y WHERE X is Personne, Y nom 'Logilabo'") - rset = self.execute('Personne X WHERE X travaille Y, Y nom "Logilab"') + self.qexecute("DELETE X travaille Y WHERE X is Personne, Y nom 'Logilabo'") + rset = self.qexecute('Personne X WHERE X travaille Y, Y nom "Logilab"') self.assertEqual(len(rset.rows), 2, rset.rows) - self.execute("DELETE X travaille Y WHERE X is Personne, Y nom 'Logilab'") - rset = self.execute('Personne X WHERE X travaille Y, Y nom "Logilab"') + self.qexecute("DELETE X travaille Y WHERE X is Personne, Y nom 'Logilab'") + rset = self.qexecute('Personne X WHERE X travaille Y, Y nom "Logilab"') self.assertEqual(len(rset.rows), 0, rset.rows) def test_delete_3(self): s = self.user_groups_session('users') - peid, = self.o.execute(s, "INSERT Personne P: P nom 'toto'")[0] - seid, = self.o.execute(s, "INSERT Societe S: S nom 'logilab'")[0] - self.o.execute(s, "SET P travaille S") - rset = self.execute('Personne P WHERE P travaille S') + with s.new_cnx() as cnx: + with cnx.ensure_cnx_set: + peid, = self.o.execute(cnx, "INSERT Personne P: P nom 'toto'")[0] + seid, = self.o.execute(cnx, "INSERT Societe S: S nom 'logilab'")[0] + self.o.execute(cnx, "SET P travaille S") + cnx.commit() + rset = self.qexecute('Personne P WHERE P travaille S') self.assertEqual(len(rset.rows), 1) - self.execute("DELETE X travaille Y WHERE X eid %s, Y eid %s" % (peid, seid)) - rset = self.execute('Personne P WHERE P travaille S') + self.qexecute("DELETE X travaille Y WHERE X eid %s, Y eid %s" % (peid, seid)) + rset = self.qexecute('Personne P WHERE P travaille S') self.assertEqual(len(rset.rows), 0) def test_delete_symmetric(self): - teid1 = self.execute("INSERT Folder T: T name 'toto'")[0][0] - teid2 = self.execute("INSERT Folder T: T name 'tutu'")[0][0] - 
self.execute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2))
-        rset = self.execute('Any X,Y WHERE X see_also Y')
+        teid1 = self.qexecute("INSERT Folder T: T name 'toto'")[0][0]
+        teid2 = self.qexecute("INSERT Folder T: T name 'tutu'")[0][0]
+        self.qexecute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2))
+        rset = self.qexecute('Any X,Y WHERE X see_also Y')
         self.assertEqual(len(rset) , 2, rset.rows)
-        self.execute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2))
-        rset = self.execute('Any X,Y WHERE X see_also Y')
+        self.qexecute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2))
+        rset = self.qexecute('Any X,Y WHERE X see_also Y')
         self.assertEqual(len(rset) , 0)
-        self.execute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2))
-        rset = self.execute('Any X,Y WHERE X see_also Y')
+        self.qexecute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2))
+        rset = self.qexecute('Any X,Y WHERE X see_also Y')
         self.assertEqual(len(rset) , 2)
-        self.execute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid2, teid1))
-        rset = self.execute('Any X,Y WHERE X see_also Y')
+        self.qexecute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid2, teid1))
+        rset = self.qexecute('Any X,Y WHERE X see_also Y')
         self.assertEqual(len(rset) , 0)

     def test_nonregr_delete_cache(self):
@@ -1165,204 +1204,221 @@
         (using cachekey on sql generation always returned the same query
        for an eid, whatever the relation)
         """
-        aeid, = self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')[0]
+        aeid, = self.qexecute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')[0]
         # XXX would be nice if the rql below was enough...
         #'INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y'
-        eeid, = self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y WHERE Y is EmailAddress')[0]
-        self.execute("DELETE Email X")
-        sqlc = self.session.cnxset['system']
-        sqlc.execute('SELECT * FROM recipients_relation')
-        self.assertEqual(len(sqlc.fetchall()), 0)
-        sqlc.execute('SELECT * FROM owned_by_relation WHERE eid_from=%s'%eeid)
-        self.assertEqual(len(sqlc.fetchall()), 0)
+        eeid, = self.qexecute('INSERT Email X: X messageid "<1234>", X subject "test", '
+                              'X sender Y, X recipients Y WHERE Y is EmailAddress')[0]
+        self.qexecute("DELETE Email X")
+        with self.session.new_cnx() as cnx:
+            with cnx.ensure_cnx_set:
+                sqlc = cnx.cnxset.cu
+                sqlc.execute('SELECT * FROM recipients_relation')
+                self.assertEqual(len(sqlc.fetchall()), 0)
+                sqlc.execute('SELECT * FROM owned_by_relation WHERE eid_from=%s'%eeid)
+                self.assertEqual(len(sqlc.fetchall()), 0)

     def test_nonregr_delete_cache2(self):
-        eid = self.execute("INSERT Folder T: T name 'toto'")[0][0]
-        self.commit()
+        eid = self.qexecute("INSERT Folder T: T name 'toto'")[0][0]
         # fill the cache
-        self.execute("Any X WHERE X eid %(x)s", {'x': eid})
-        self.execute("Any X WHERE X eid %s" % eid)
-        self.execute("Folder X WHERE X eid %(x)s", {'x': eid})
-        self.execute("Folder X WHERE X eid %s" % eid)
-        self.execute("DELETE Folder T WHERE T eid %s" % eid)
-        self.commit()
-        rset = self.execute("Any X WHERE X eid %(x)s", {'x': eid})
+        self.qexecute("Any X WHERE X eid %(x)s", {'x': eid})
+        self.qexecute("Any X WHERE X eid %s" % eid)
+        self.qexecute("Folder X WHERE X eid %(x)s", {'x': eid})
+        self.qexecute("Folder X WHERE X eid %s" % eid)
+        self.qexecute("DELETE Folder T WHERE T eid %s" % eid)
+        rset = self.qexecute("Any X WHERE X eid %(x)s",
+                             {'x': eid})
         self.assertEqual(rset.rows, [])
-        rset = self.execute("Any X WHERE X eid %s" % eid)
+        rset = self.qexecute("Any X WHERE X eid %s" % eid)
         self.assertEqual(rset.rows, [])
-        rset = self.execute("Folder X WHERE X eid %(x)s", {'x': eid})
+        rset = self.qexecute("Folder X WHERE X eid %(x)s", {'x': eid})
         self.assertEqual(rset.rows, [])
-        rset = self.execute("Folder X WHERE X eid %s" %eid)
+        rset = self.qexecute("Folder X WHERE X eid %s" %eid)
         self.assertEqual(rset.rows, [])

     # update queries tests ####################################################

     def test_update_1(self):
-        peid = self.execute("INSERT Personne Y: Y nom 'toto'")[0][0]
-        rset = self.execute('Personne X WHERE X nom "toto"')
+        peid = self.qexecute("INSERT Personne Y: Y nom 'toto'")[0][0]
+        rset = self.qexecute('Personne X WHERE X nom "toto"')
         self.assertEqual(len(rset.rows), 1)
-        rset = self.execute("SET X nom 'tutu', X prenom 'original' WHERE X is Personne, X nom 'toto'")
+        rset = self.qexecute("SET X nom 'tutu', X prenom 'original' WHERE X is Personne, X nom 'toto'")
         self.assertEqual(tuplify(rset.rows), [(peid, 'tutu', 'original')])
-        rset = self.execute('Any Y, Z WHERE X is Personne, X nom Y, X prenom Z')
+        rset = self.qexecute('Any Y, Z WHERE X is Personne, X nom Y, X prenom Z')
         self.assertEqual(tuplify(rset.rows), [('tutu', 'original')])

     def test_update_2(self):
-        peid, seid = self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")[0]
-        rset = self.execute("SET X travaille Y WHERE X nom 'bidule', Y nom 'toto'")
+        peid, seid = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")[0]
+        rset = self.qexecute("SET X travaille Y WHERE X nom 'bidule', Y nom 'toto'")
         self.assertEqual(tuplify(rset.rows), [(peid, seid)])
-        rset = self.execute('Any X, Y WHERE X travaille Y')
+        rset = self.qexecute('Any X, Y WHERE X travaille Y')
         self.assertEqual(len(rset.rows), 1)

     def test_update_2bis(self):
-        rset = self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")
+        rset = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")
         eid1, eid2 = rset[0][0], rset[0][1]
-        self.execute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s",
+        self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s",
                      {'x': str(eid1), 'y': str(eid2)})
-        rset = self.execute('Any X, Y WHERE X travaille Y')
+        rset = self.qexecute('Any X, Y WHERE X travaille Y')
         self.assertEqual(len(rset.rows), 1)
         # test add of an existing relation but with NOT X rel Y protection
-        self.assertFalse(self.execute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s,"
+        self.assertFalse(self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s,"
                                       "NOT X travaille Y",
                                       {'x': str(eid1), 'y': str(eid2)}))

     def test_update_2ter(self):
-        rset = self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")
+        rset = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")
         eid1, eid2 = rset[0][0], rset[0][1]
-        self.execute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s",
+        self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s",
                      {'x': unicode(eid1), 'y': unicode(eid2)})
-        rset = self.execute('Any X, Y WHERE X travaille Y')
+        rset = self.qexecute('Any X, Y WHERE X travaille Y')
         self.assertEqual(len(rset.rows), 1)

     def test_update_multiple1(self):
-        peid1 = self.execute("INSERT Personne Y: Y nom 'tutu'")[0][0]
-        peid2 = self.execute("INSERT Personne Y: Y nom 'toto'")[0][0]
-        self.execute("SET X nom 'tutu', Y nom 'toto' WHERE X nom 'toto', Y nom 'tutu'")
-        
self.assertEqual(self.execute('Any X WHERE X nom "toto"').rows, [[peid1]]) - self.assertEqual(self.execute('Any X WHERE X nom "tutu"').rows, [[peid2]]) + peid1 = self.qexecute("INSERT Personne Y: Y nom 'tutu'")[0][0] + peid2 = self.qexecute("INSERT Personne Y: Y nom 'toto'")[0][0] + self.qexecute("SET X nom 'tutu', Y nom 'toto' WHERE X nom 'toto', Y nom 'tutu'") + self.assertEqual(self.qexecute('Any X WHERE X nom "toto"').rows, [[peid1]]) + self.assertEqual(self.qexecute('Any X WHERE X nom "tutu"').rows, [[peid2]]) def test_update_multiple2(self): - ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'")[0][0] - peid1 = self.execute("INSERT Personne Y: Y nom 'turlu'")[0][0] - peid2 = self.execute("INSERT Personne Y: Y nom 'tutu'")[0][0] - self.execute('SET P1 owned_by U, P2 owned_by U ' - 'WHERE P1 eid %s, P2 eid %s, U eid %s' % (peid1, peid2, ueid)) - self.assertTrue(self.execute('Any X WHERE X eid %s, X owned_by U, U eid %s' - % (peid1, ueid))) - self.assertTrue(self.execute('Any X WHERE X eid %s, X owned_by U, U eid %s' - % (peid2, ueid))) + with self.session.new_cnx() as cnx: + ueid = cnx.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'")[0][0] + peid1 = cnx.execute("INSERT Personne Y: Y nom 'turlu'")[0][0] + peid2 = cnx.execute("INSERT Personne Y: Y nom 'tutu'")[0][0] + cnx.execute('SET P1 owned_by U, P2 owned_by U ' + 'WHERE P1 eid %s, P2 eid %s, U eid %s' % (peid1, peid2, ueid)) + self.assertTrue(cnx.execute('Any X WHERE X eid %s, X owned_by U, U eid %s' + % (peid1, ueid))) + self.assertTrue(cnx.execute('Any X WHERE X eid %s, X owned_by U, U eid %s' + % (peid2, ueid))) def test_update_math_expr(self): - orders = [r[0] for r in self.execute('Any O ORDERBY O WHERE ST name "Personne", X from_entity ST, X ordernum O')] + orders = [r[0] for r in self.qexecute('Any O ORDERBY O WHERE ST name "Personne", ' + 'X from_entity ST, X ordernum O')] for i,v in enumerate(orders): if v != orders[0]: splitidx = i break - self.execute('SET X ordernum Y+1 WHERE X from_entity SE, SE name "Personne", X ordernum Y, X ordernum >= %(order)s', + self.qexecute('SET X ordernum Y+1 WHERE X from_entity SE, SE name "Personne", ' + 'X ordernum Y, X ordernum >= %(order)s', {'order': orders[splitidx]}) - orders2 = [r[0] for r in self.execute('Any O ORDERBY O WHERE ST name "Personne", X from_entity ST, X ordernum O')] + orders2 = [r[0] for r in self.qexecute('Any O ORDERBY O WHERE ST name "Personne", ' + 'X from_entity ST, X ordernum O')] orders = orders[:splitidx] + [o+1 for o in orders[splitidx:]] self.assertEqual(orders2, orders) def test_update_string_concat(self): - beid = self.execute("INSERT Bookmark Y: Y title 'toto', Y path '/view'")[0][0] - self.execute('SET X title XN + %(suffix)s WHERE X is Bookmark, X title XN', {'suffix': u'-moved'}) - newname = self.execute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid})[0][0] + beid = self.qexecute("INSERT Bookmark Y: Y title 'toto', Y path '/view'")[0][0] + self.qexecute('SET X title XN + %(suffix)s WHERE X is Bookmark, X title XN', + {'suffix': u'-moved'}) + newname = self.qexecute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid})[0][0] self.assertEqual(newname, 'toto-moved') def test_update_not_exists(self): - rset = self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'") + rset = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'") eid1, eid2 = rset[0][0], rset[0][1] - rset = self.execute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s, " + rset = self.qexecute("SET X travaille Y 
WHERE X eid %(x)s, Y eid %(y)s, "
                             "NOT EXISTS(Z ecrit_par X)",
                             {'x': unicode(eid1), 'y': unicode(eid2)})
         self.assertEqual(tuplify(rset.rows), [(eid1, eid2)])

     def test_update_query_error(self):
-        self.execute("INSERT Personne Y: Y nom 'toto'")
-        self.assertRaises(Exception, self.execute, "SET X nom 'toto', X is Personne")
-        self.assertRaises(QueryError, self.execute, "SET X nom 'toto', X has_text 'tutu' WHERE X is Personne")
-        self.assertRaises(QueryError, self.execute, "SET X login 'tutu', X eid %s" % cnx.user(self.session).eid)
+        self.qexecute("INSERT Personne Y: Y nom 'toto'")
+        self.assertRaises(Exception, self.qexecute, "SET X nom 'toto', X is Personne")
+        self.assertRaises(QueryError, self.qexecute, "SET X nom 'toto', X has_text 'tutu' "
+                          "WHERE X is Personne")
+        self.assertRaises(QueryError,
+                          self.qexecute,
+                          "SET X login 'tutu', X eid %s" % cnx.user(self.session).eid)

     # HAVING on write queries tests ############################################

     def test_update_having(self):
-        peid1 = self.execute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0]
-        peid2 = self.execute("INSERT Personne Y: Y nom 'hop', Y tel 2")[0][0]
-        rset = self.execute("SET X tel 3 WHERE X tel TEL HAVING TEL&1=1")
+        peid1 = self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0]
+        peid2 = self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 2")[0][0]
+        rset = self.qexecute("SET X tel 3 WHERE X tel TEL HAVING TEL&1=1")
         self.assertEqual(tuplify(rset.rows), [(peid1, 3)])

     def test_insert_having(self):
         self.skipTest('unsupported yet')
-        self.execute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0]
-        self.assertFalse(self.execute("INSERT Personne Y: Y nom 'hop', Y tel 2 WHERE X tel XT HAVING XT&2=2"))
-        self.assertTrue(self.execute("INSERT Personne Y: Y nom 'hop', Y tel 2 WHERE X tel XT HAVING XT&1=1"))
+        self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0]
+        self.assertFalse(self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 2 "
+                                       "WHERE X tel XT HAVING XT&2=2"))
+        self.assertTrue(self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 2 "
                                       "WHERE X tel XT HAVING XT&1=1"))

     def test_delete_having(self):
-        self.execute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0]
-        self.assertFalse(self.execute("DELETE Personne Y WHERE X tel XT HAVING XT&2=2"))
-        self.assertTrue(self.execute("DELETE Personne Y WHERE X tel XT HAVING XT&1=1"))
+        self.qexecute("INSERT Personne Y: Y nom 'hop', Y tel 1")[0][0]
+        self.assertFalse(self.qexecute("DELETE Personne Y WHERE X tel XT HAVING XT&2=2"))
+        self.assertTrue(self.qexecute("DELETE Personne Y WHERE X tel XT HAVING XT&1=1"))

     # upassword encryption tests #################################################

     def test_insert_upassword(self):
-        rset = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'")
+        rset = self.qexecute("INSERT CWUser X: X login 'bob', X upassword 'toto', "
+                             "X in_group G WHERE G name 'users'")
         self.assertEqual(len(rset.rows), 1)
         self.assertEqual(rset.description, [('CWUser',)])
         self.assertRaises(Unauthorized,
-                          self.execute, "Any P WHERE X is CWUser, X login 'bob', X upassword P")
-        cursor = self.cnxset['system']
-        cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
-                       % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
-        passwd = str(cursor.fetchone()[0])
-        self.assertEqual(passwd, crypt_password('toto', passwd))
-        rset = self.execute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s",
+                          self.qexecute, "Any P WHERE X is CWUser, X login 'bob', X upassword P")
+        with self.session.new_cnx() as cnx:
+            with cnx.ensure_cnx_set:
+                cursor = cnx.cnxset.cu
+                cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
+                               % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
+                passwd = str(cursor.fetchone()[0])
+                self.assertEqual(passwd, crypt_password('toto', passwd))
+        rset = self.qexecute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s",
                             {'pwd': Binary(passwd)})
         self.assertEqual(len(rset.rows), 1)
         self.assertEqual(rset.description, [('CWUser',)])

     def test_update_upassword(self):
-        rset = self.execute("INSERT CWUser X: X login 'bob', X upassword %(pwd)s", {'pwd': 'toto'})
-        self.assertEqual(rset.description[0][0], 'CWUser')
-        rset = self.execute("SET X upassword %(pwd)s WHERE X is CWUser, X login 'bob'",
-                            {'pwd': 'tutu'})
-        cursor = self.cnxset['system']
-        cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
-                       % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
-        passwd = str(cursor.fetchone()[0])
-        self.assertEqual(passwd, crypt_password('tutu', passwd))
-        rset = self.execute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s",
-                            {'pwd': Binary(passwd)})
-        self.assertEqual(len(rset.rows), 1)
-        self.assertEqual(rset.description, [('CWUser',)])
+        with self.session.new_cnx() as cnx:
+            with cnx.ensure_cnx_set:
+                rset = cnx.execute("INSERT CWUser X: X login 'bob', X upassword %(pwd)s",
+                                   {'pwd': 'toto'})
+                self.assertEqual(rset.description[0][0], 'CWUser')
+                rset = cnx.execute("SET X upassword %(pwd)s WHERE X is CWUser, X login 'bob'",
+                                   {'pwd': 'tutu'})
+                cursor = cnx.cnxset.cu
+                cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
+                               % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
+                passwd = str(cursor.fetchone()[0])
+                self.assertEqual(passwd, crypt_password('tutu', passwd))
+                rset = cnx.execute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s",
+                                   {'pwd': Binary(passwd)})
+                self.assertEqual(len(rset.rows), 1)
+                self.assertEqual(rset.description, [('CWUser',)])

     # TZ datetime tests ########################################################

     def test_tz_datetime(self):
-        self.execute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s",
+        self.qexecute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s",
                      {'date': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))})
-        datenaiss = self.execute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0]
+        datenaiss = self.qexecute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0]
         self.assertEqual(datenaiss.tzinfo, None)
         self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 1, 0))

     # non regression tests #####################################################

     def test_nonregr_1(self):
-        teid = self.execute("INSERT Tag X: X name 'tag'")[0][0]
-        self.execute("SET X tags Y WHERE X name 'tag', Y is State, Y name 'activated'")
-        rset = self.execute('Any X WHERE T tags X')
+        teid = self.qexecute("INSERT Tag X: X name 'tag'")[0][0]
+        self.qexecute("SET X tags Y WHERE X name 'tag', Y is State, Y name 'activated'")
+        rset = self.qexecute('Any X WHERE T tags X')
         self.assertEqual(len(rset.rows), 1, rset.rows)
-        rset = self.execute('Any T WHERE T tags X, X is State')
+        rset = self.qexecute('Any T WHERE T tags X, X is State')
         self.assertEqual(rset.rows, [[teid]])
-        rset = self.execute('Any T WHERE T tags X')
+        rset = self.qexecute('Any T WHERE T tags X')
         self.assertEqual(rset.rows, [[teid]])

     def test_nonregr_2(self):
-        teid = self.execute("INSERT Tag X: X name 'tag'")[0][0]
'tag'")[0][0] + geid = self.qexecute("CWGroup G WHERE G name 'users'")[0][0] + self.qexecute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s", {'g': geid, 't': teid}) - rset = self.execute('Any X WHERE E eid %(x)s, E tags X', + rset = self.qexecute('Any X WHERE E eid %(x)s, E tags X', {'x': teid}) self.assertEqual(rset.rows, [[geid]]) @@ -1370,7 +1426,7 @@ """bad sql generated on the second query (destination_state is not detected as an inlined relation) """ - rset = self.execute('Any S,ES,T WHERE S state_of WF, WF workflow_of ET, ET name "CWUser",' + rset = self.qexecute('Any S,ES,T WHERE S state_of WF, WF workflow_of ET, ET name "CWUser",' 'ES allowed_transition T, T destination_state S') self.assertEqual(len(rset.rows), 2) @@ -1378,26 +1434,28 @@ # fix variables'type, else we get (nb of entity types with a 'name' attribute)**3 # union queries and that make for instance a 266Ko sql query which is refused # by the server (or client lib) - rset = self.execute('Any ER,SE,OE WHERE SE name "Comment", ER name "comments", OE name "Comment",' + rset = self.qexecute('Any ER,SE,OE WHERE SE name "Comment", ER name "comments", OE name "Comment",' 'ER is CWRType, SE is CWEType, OE is CWEType') self.assertEqual(len(rset), 1) def test_nonregr_5(self): # jpl #15505: equivalent queries returning different result sets - teid1 = self.execute("INSERT Folder X: X name 'hop'")[0][0] - teid2 = self.execute("INSERT Folder X: X name 'hip'")[0][0] - neid = self.execute("INSERT Note X: X todo_by U, X filed_under T WHERE U login 'admin', T name 'hop'")[0][0] - weid = self.execute("INSERT Affaire X: X concerne N, X filed_under T WHERE N is Note, T name 'hip'")[0][0] - rset1 = self.execute('Any N,U WHERE N filed_under T, T eid %s,' + teid1 = self.qexecute("INSERT Folder X: X name 'hop'")[0][0] + teid2 = self.qexecute("INSERT Folder X: X name 'hip'")[0][0] + neid = self.qexecute("INSERT Note X: X todo_by U, X filed_under T " + "WHERE U login 'admin', T name 'hop'")[0][0] + weid = self.qexecute("INSERT Affaire X: X concerne N, X filed_under T " + "WHERE N is Note, T name 'hip'")[0][0] + rset1 = self.qexecute('Any N,U WHERE N filed_under T, T eid %s,' 'N todo_by U, W concerne N,' 'W is Affaire, W filed_under A, A eid %s' % (teid1, teid2)) - rset2 = self.execute('Any N,U WHERE N filed_under T, T eid %s,' + rset2 = self.qexecute('Any N,U WHERE N filed_under T, T eid %s,' 'N todo_by U, W concerne N,' 'W filed_under A, A eid %s' % (teid1, teid2)) - rset3 = self.execute('Any N,U WHERE N todo_by U, T eid %s,' + rset3 = self.qexecute('Any N,U WHERE N todo_by U, T eid %s,' 'N filed_under T, W concerne N,' 'W is Affaire, W filed_under A, A eid %s' % (teid1, teid2)) - rset4 = self.execute('Any N,U WHERE N todo_by U, T eid %s,' + rset4 = self.qexecute('Any N,U WHERE N todo_by U, T eid %s,' 'N filed_under T, W concerne N,' 'W filed_under A, A eid %s' % (teid1, teid2)) self.assertEqual(rset1.rows, rset2.rows) @@ -1405,19 +1463,19 @@ self.assertEqual(rset1.rows, rset4.rows) def test_nonregr_6(self): - self.execute('Any N,COUNT(S) GROUPBY N ORDERBY COUNT(N) WHERE S name N, S is State') + self.qexecute('Any N,COUNT(S) GROUPBY N ORDERBY COUNT(N) WHERE S name N, S is State') def test_sqlite_encoding(self): """XXX this test was trying to show a bug on use of lower which only occurs with non ascii string and misconfigured locale """ - self.execute("INSERT Tag X: X name %(name)s," + self.qexecute("INSERT Tag X: X name %(name)s," "X modification_date %(modification_date)s," "X creation_date %(creation_date)s", {'name': u'éname0', 
                       'modification_date': '2003/03/12 11:00',
                       'creation_date': '2000/07/03 11:00'})
-        rset = self.execute('Any lower(N) ORDERBY LOWER(N) WHERE X is Tag, X name N,'
+        rset = self.qexecute('Any lower(N) ORDERBY LOWER(N) WHERE X is Tag, X name N,'
                             'X owned_by U, U eid %(x)s',
                             {'x':self.session.user.eid})
         self.assertEqual(rset.rows, [[u'\xe9name0']])

@@ -1428,35 +1486,34 @@
         solutions may be merged into one by the querier while all
        solutions are needed to build the result's description
         """
-        self.execute("INSERT Personne X: X nom 'bidule'")
-        self.execute("INSERT Societe Y: Y nom 'toto'")
-        beid = self.execute("INSERT Basket B: B name 'mybasket'")[0][0]
-        self.execute("SET X in_basket B WHERE X is Personne")
-        self.execute("SET X in_basket B WHERE X is Societe")
-        rset = self.execute('Any X WHERE X in_basket B, B eid %s' % beid)
+        self.qexecute("INSERT Personne X: X nom 'bidule'")
+        self.qexecute("INSERT Societe Y: Y nom 'toto'")
+        beid = self.qexecute("INSERT Basket B: B name 'mybasket'")[0][0]
+        self.qexecute("SET X in_basket B WHERE X is Personne")
+        self.qexecute("SET X in_basket B WHERE X is Societe")
+        rset = self.qexecute('Any X WHERE X in_basket B, B eid %s' % beid)
         self.assertEqual(len(rset), 2)
         self.assertEqual(rset.description, [('Personne',), ('Societe',)])

     def test_nonregr_cache_1(self):
-        peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
-        beid = self.execute("INSERT Basket X: X name 'tag'")[0][0]
-        self.execute("SET X in_basket Y WHERE X is Personne, Y eid %(y)s",
+        peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0]
+        beid = self.qexecute("INSERT Basket X: X name 'tag'")[0][0]
+        self.qexecute("SET X in_basket Y WHERE X is Personne, Y eid %(y)s",
                      {'y': beid})
-        rset = self.execute("Any X WHERE X in_basket B, B eid %(x)s",
+        rset = self.qexecute("Any X WHERE X in_basket B, B eid %(x)s",
                             {'x': beid})
         self.assertEqual(rset.rows, [[peid]])
-        rset = self.execute("Any X WHERE X in_basket B, B eid %(x)s",
+        rset = self.qexecute("Any X WHERE X in_basket B, B eid %(x)s",
                             {'x': beid})
         self.assertEqual(rset.rows, [[peid]])

     def test_nonregr_has_text_cache(self):
-        eid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
-        eid2 = self.execute("INSERT Personne X: X nom 'tag'")[0][0]
-        self.commit()
-        rset = self.execute("Any X WHERE X has_text %(text)s", {'text': 'bidule'})
+        eid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0]
+        eid2 = self.qexecute("INSERT Personne X: X nom 'tag'")[0][0]
+        rset = self.qexecute("Any X WHERE X has_text %(text)s", {'text': 'bidule'})
         self.assertEqual(rset.rows, [[eid1]])
-        rset = self.execute("Any X WHERE X has_text %(text)s", {'text': 'tag'})
+        rset = self.qexecute("Any X WHERE X has_text %(text)s", {'text': 'tag'})
         self.assertEqual(rset.rows, [[eid2]])

     def test_nonregr_sortterm_management(self):
@@ -1467,133 +1524,133 @@
        needs sqlite including the http://www.sqlite.org/cvstrac/tktview?tn=3773 fix
         """
-        self.execute('Any X ORDERBY D DESC WHERE X creation_date D')
+        self.qexecute('Any X ORDERBY D DESC WHERE X creation_date D')

     def test_nonregr_extra_joins(self):
         ueid = self.session.user.eid
-        teid1 = self.execute("INSERT Folder X: X name 'folder1'")[0][0]
-        teid2 = self.execute("INSERT Folder X: X name 'folder2'")[0][0]
-        neid1 = self.execute("INSERT Note X: X para 'note1'")[0][0]
-        neid2 = self.execute("INSERT Note X: X para 'note2'")[0][0]
-        self.execute("SET X filed_under Y WHERE X eid %s, Y eid %s"
+        teid1 = self.qexecute("INSERT Folder X: X name 'folder1'")[0][0]
+        teid2 = self.qexecute("INSERT Folder X: X name 
'folder2'")[0][0] + neid1 = self.qexecute("INSERT Note X: X para 'note1'")[0][0] + neid2 = self.qexecute("INSERT Note X: X para 'note2'")[0][0] + self.qexecute("SET X filed_under Y WHERE X eid %s, Y eid %s" % (neid1, teid1)) - self.execute("SET X filed_under Y WHERE X eid %s, Y eid %s" + self.qexecute("SET X filed_under Y WHERE X eid %s, Y eid %s" % (neid2, teid2)) - self.execute("SET X todo_by Y WHERE X is Note, Y eid %s" % ueid) - rset = self.execute('Any N WHERE N todo_by U, N is Note, U eid %s, N filed_under T, T eid %s' + self.qexecute("SET X todo_by Y WHERE X is Note, Y eid %s" % ueid) + rset = self.qexecute('Any N WHERE N todo_by U, N is Note, U eid %s, N filed_under T, T eid %s' % (ueid, teid1)) self.assertEqual(len(rset), 1) def test_nonregr_XXX(self): - teid = self.execute('Transition S WHERE S name "deactivate"')[0][0] - rset = self.execute('Any O WHERE O is State, ' + teid = self.qexecute('Transition S WHERE S name "deactivate"')[0][0] + rset = self.qexecute('Any O WHERE O is State, ' 'S eid %(x)s, S transition_of ET, O state_of ET', {'x': teid}) self.assertEqual(len(rset), 2) - rset = self.execute('Any O WHERE O is State, NOT S destination_state O, ' + rset = self.qexecute('Any O WHERE O is State, NOT S destination_state O, ' 'S eid %(x)s, S transition_of ET, O state_of ET', {'x': teid}) self.assertEqual(len(rset), 1) def test_nonregr_set_datetime(self): # huum, psycopg specific - self.execute('SET X creation_date %(date)s WHERE X eid 1', {'date': date.today()}) - - def test_nonregr_set_query(self): - ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'")[0][0] - self.execute("SET E in_group G, E firstname %(firstname)s, E surname %(surname)s " - "WHERE E eid %(x)s, G name 'users'", - {'x':ueid, 'firstname': u'jean', 'surname': u'paul'}) + self.qexecute('SET X creation_date %(date)s WHERE X eid 1', {'date': date.today()}) def test_nonregr_u_owned_by_u(self): - ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto', X in_group G " + ueid = self.qexecute("INSERT CWUser X: X login 'bob', X upassword 'toto', X in_group G " "WHERE G name 'users'")[0][0] - rset = self.execute("CWUser U") + rset = self.qexecute("CWUser U") self.assertEqual(len(rset), 3) # bob + admin + anon - rset = self.execute("Any U WHERE NOT U owned_by U") - self.assertEqual(len(rset), 0) # even admin created at repo initialization time should belong to itself + rset = self.qexecute("Any U WHERE NOT U owned_by U") + # even admin created at repo initialization time should belong to itself + self.assertEqual(len(rset), 0) def test_nonreg_update_index(self): # this is the kind of queries generated by "cubicweb-ctl db-check -ry" - self.execute("SET X description D WHERE X is State, X description D") + self.qexecute("SET X description D WHERE X is State, X description D") def test_nonregr_is(self): - uteid = self.execute('Any ET WHERE ET name "CWUser"')[0][0] - self.execute('Any X, ET WHERE X is ET, ET eid %s' % uteid) + uteid = self.qexecute('Any ET WHERE ET name "CWUser"')[0][0] + self.qexecute('Any X, ET WHERE X is ET, ET eid %s' % uteid) def test_nonregr_orderby(self): - seid = self.execute('Any X WHERE X name "activated"')[0][0] - self.execute('Any X,S, MAX(T) GROUPBY X,S ORDERBY S WHERE X is CWUser, T tags X, S eid IN(%s), X in_state S' % seid) + seid = self.qexecute('Any X WHERE X name "activated"')[0][0] + self.qexecute('Any X,S, MAX(T) GROUPBY X,S ORDERBY S ' + 'WHERE X is CWUser, T tags X, S eid IN(%s), X in_state S' % seid) def test_nonregr_solution_cache(self): 
self.skipTest('XXX should be fixed or documented') # (doesn't occur if cache key is provided.) - rset = self.execute('Any X WHERE X is CWUser, X eid %(x)s', {'x':self.ueid}) + rset = self.qexecute('Any X WHERE X is CWUser, X eid %(x)s', {'x':self.ueid}) self.assertEqual(len(rset), 1) - rset = self.execute('Any X WHERE X is CWUser, X eid %(x)s', {'x':12345}) + rset = self.qexecute('Any X WHERE X is CWUser, X eid %(x)s', {'x':12345}) self.assertEqual(len(rset), 0) def test_nonregr_final_norestr(self): - self.assertRaises(BadRQLQuery, self.execute, 'Date X') + self.assertRaises(BadRQLQuery, self.qexecute, 'Date X') def test_nonregr_eid_cmp(self): - peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] - peid2 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] - rset = self.execute('Any X,Y WHERE X is Personne, Y is Personne, X nom XD, Y nom XD, X eid Z, Y eid > Z') + peid1 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + peid2 = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] + rset = self.qexecute('Any X,Y WHERE X is Personne, Y is Personne, ' + 'X nom XD, Y nom XD, X eid Z, Y eid > Z') self.assertEqual(rset.rows, [[peid1, peid2]]) - rset = self.execute('Any X,Y WHERE X nom XD, Y nom XD, X eid Z, Y eid > Z') + rset = self.qexecute('Any X,Y WHERE X nom XD, Y nom XD, X eid Z, Y eid > Z') self.assertEqual(rset.rows, [[peid1, peid2]]) def test_nonregr_has_text_ambiguity_1(self): - peid = self.execute("INSERT CWUser X: X login 'bidule', X upassword 'bidule', X in_group G WHERE G name 'users'")[0][0] - aeid = self.execute("INSERT Affaire X: X ref 'bidule'")[0][0] - self.commit() - rset = self.execute('Any X WHERE X is CWUser, X has_text "bidule"') + peid = self.qexecute("INSERT CWUser X: X login 'bidule', X upassword 'bidule', " + "X in_group G WHERE G name 'users'")[0][0] + aeid = self.qexecute("INSERT Affaire X: X ref 'bidule'")[0][0] + rset = self.qexecute('Any X WHERE X is CWUser, X has_text "bidule"') self.assertEqual(rset.rows, [[peid]]) - rset = self.execute('Any X WHERE X is CWUser, X has_text "bidule", X in_state S, S name SN') + rset = self.qexecute('Any X WHERE X is CWUser, X has_text "bidule", ' + 'X in_state S, S name SN') self.assertEqual(rset.rows, [[peid]]) def test_nonregr_sql_cache(self): # different SQL generated when 'name' is None or not (IS NULL). 
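# Illustrative sketch, not part of this changeset: the comment above is the
# whole point of this non-regression test -- 'X name %(name)s' compiles to an
# IS NULL restriction when the argument is None and to an equality test
# otherwise, so a querier-level SQL cache must fold the None-ness of each
# argument into its key. A minimal, hypothetical version of such a key:
#
#     def sql_cache_key(rql, kwargs):
#         # arguments bound to None compile to IS NULL, not to '= %(x)s',
#         # so they must distinguish otherwise identical cache entries
#         return (rql, frozenset(k for k, v in kwargs.items() if v is None))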
- self.assertFalse(self.execute('Any X WHERE X is CWEType, X name %(name)s', {'name': None})) - self.assertTrue(self.execute('Any X WHERE X is CWEType, X name %(name)s', {'name': 'CWEType'})) + self.assertFalse(self.qexecute('Any X WHERE X is CWEType, X name %(name)s', + {'name': None})) + self.assertTrue(self.qexecute('Any X WHERE X is CWEType, X name %(name)s', + {'name': 'CWEType'})) class NonRegressionTC(CubicWebTC): def test_has_text_security_cache_bug(self): - req = self.request() - self.create_user(req, 'user', ('users',)) - aff1 = req.create_entity('Societe', nom=u'aff1') - aff2 = req.create_entity('Societe', nom=u'aff2') - self.commit() - with self.login('user', password='user'): - res = self.execute('Any X WHERE X has_text %(text)s', {'text': 'aff1'}) + with self.admin_access.repo_cnx() as cnx: + self.create_user(cnx, 'user', ('users',)) + aff1 = cnx.create_entity('Societe', nom=u'aff1') + aff2 = cnx.create_entity('Societe', nom=u'aff2') + cnx.commit() + with self.new_access('user').repo_cnx() as cnx: + res = cnx.execute('Any X WHERE X has_text %(text)s', {'text': 'aff1'}) self.assertEqual(res.rows, [[aff1.eid]]) - res = self.execute('Any X WHERE X has_text %(text)s', {'text': 'aff2'}) + res = cnx.execute('Any X WHERE X has_text %(text)s', {'text': 'aff2'}) self.assertEqual(res.rows, [[aff2.eid]]) def test_set_relations_eid(self): - req = self.request() - # create 3 email addresses - a1 = req.create_entity('EmailAddress', address=u'a1') - a2 = req.create_entity('EmailAddress', address=u'a2') - a3 = req.create_entity('EmailAddress', address=u'a3') - # SET relations using '>=' operator on eids - req.execute('SET U use_email A WHERE U login "admin", A eid >= %s' % a2.eid) - self.assertEqual( - [[a2.eid], [a3.eid]], - req.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) - # DELETE - req.execute('DELETE U use_email A WHERE U login "admin", A eid > %s' % a2.eid) - self.assertEqual( - [[a2.eid]], - req.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) - req.execute('DELETE U use_email A WHERE U login "admin"') - # SET relations using '<' operator on eids - req.execute('SET U use_email A WHERE U login "admin", A eid < %s' % a2.eid) - self.assertEqual( - [[a1.eid]], - req.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) + with self.admin_access.repo_cnx() as cnx: + # create 3 email addresses + a1 = cnx.create_entity('EmailAddress', address=u'a1') + a2 = cnx.create_entity('EmailAddress', address=u'a2') + a3 = cnx.create_entity('EmailAddress', address=u'a3') + # SET relations using '>=' operator on eids + cnx.execute('SET U use_email A WHERE U login "admin", A eid >= %s' % a2.eid) + self.assertEqual( + [[a2.eid], [a3.eid]], + cnx.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) + # DELETE + cnx.execute('DELETE U use_email A WHERE U login "admin", A eid > %s' % a2.eid) + self.assertEqual( + [[a2.eid]], + cnx.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) + cnx.execute('DELETE U use_email A WHERE U login "admin"') + # SET relations using '<' operator on eids + cnx.execute('SET U use_email A WHERE U login "admin", A eid < %s' % a2.eid) + self.assertEqual( + [[a1.eid]], + cnx.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows) if __name__ == '__main__': unittest_main() diff -r 84738d495ffd -r 793377697c81 server/test/unittest_repository.py --- a/server/test/unittest_repository.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/unittest_repository.py Wed Sep 24 18:04:30 
2014 +0200 @@ -1,5 +1,5 @@ # -*- coding: iso-8859-1 -*- -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -18,25 +18,19 @@ # with CubicWeb. If not, see . """unit tests for module cubicweb.server.repository""" -import os -import sys import threading import time import logging -from copy import deepcopy -from datetime import datetime - -from logilab.common.testlib import TestCase, unittest_main from yams.constraints import UniqueConstraint from yams import register_base_type, unregister_base_type from logilab.database import get_db_helper -from cubicweb import (BadConnectionId, RepositoryError, ValidationError, +from cubicweb import (BadConnectionId, ValidationError, UnknownEid, AuthenticationError, Unauthorized, QueryError) from cubicweb.predicates import is_instance -from cubicweb.schema import CubicWebSchema, RQLConstraint +from cubicweb.schema import RQLConstraint from cubicweb.dbapi import connect, multiple_connections_unfix from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools.repotest import tuplify @@ -53,15 +47,16 @@ """ def test_unique_together_constraint(self): - self.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"') - with self.assertRaises(ValidationError) as wraperr: - self.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"') - self.assertEqual( - {'cp': u'cp is part of violated unicity constraint', - 'nom': u'nom is part of violated unicity constraint', - 'type': u'type is part of violated unicity constraint', - 'unicity constraint': u'some relations violate a unicity constraint'}, - wraperr.exception.args[1]) + with self.admin_access.repo_cnx() as cnx: + cnx.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"') + with self.assertRaises(ValidationError) as wraperr: + cnx.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"') + self.assertEqual( + {'cp': u'cp is part of violated unicity constraint', + 'nom': u'nom is part of violated unicity constraint', + 'type': u'type is part of violated unicity constraint', + 'unicity constraint': u'some relations violate a unicity constraint'}, + wraperr.exception.args[1]) def test_unique_together_schema(self): person = self.repo.schema.eschema('Personne') @@ -70,13 +65,16 @@ ('nom', 'prenom', 'inline2')) def test_all_entities_have_owner(self): - self.assertFalse(self.execute('Any X WHERE NOT X owned_by U')) + with self.admin_access.repo_cnx() as cnx: + self.assertFalse(cnx.execute('Any X WHERE NOT X owned_by U')) def test_all_entities_have_is(self): - self.assertFalse(self.execute('Any X WHERE NOT X is ET')) + with self.admin_access.repo_cnx() as cnx: + self.assertFalse(cnx.execute('Any X WHERE NOT X is ET')) def test_all_entities_have_cw_source(self): - self.assertFalse(self.execute('Any X WHERE NOT X cw_source S')) + with self.admin_access.repo_cnx() as cnx: + self.assertFalse(cnx.execute('Any X WHERE NOT X cw_source S')) def test_connect(self): cnxid = self.repo.connect(self.admlogin, password=self.admpassword) @@ -131,15 +129,17 @@ events = ('after_update_entity',) def __call__(self): raise ValidationError(self.entity.eid, {}) - with self.temporary_appobjects(ValidationErrorAfterHook): - self.assertRaises(ValidationError, - self.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"') - self.assertTrue(self.execute('Any X WHERE X 
is CWGroup, X name "toto"')) - with self.assertRaises(QueryError) as cm: - self.commit() - self.assertEqual(str(cm.exception), 'transaction must be rolled back') - self.rollback() - self.assertFalse(self.execute('Any X WHERE X is CWGroup, X name "toto"')) + + with self.admin_access.repo_cnx() as cnx: + with self.temporary_appobjects(ValidationErrorAfterHook): + self.assertRaises(ValidationError, + cnx.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"') + self.assertTrue(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) + with self.assertRaises(QueryError) as cm: + cnx.commit() + self.assertEqual(str(cm.exception), 'transaction must be rolled back') + cnx.rollback() + self.assertFalse(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) def test_rollback_on_execute_unauthorized(self): class UnauthorizedAfterHook(Hook): @@ -148,15 +148,17 @@ events = ('after_update_entity',) def __call__(self): raise Unauthorized() - with self.temporary_appobjects(UnauthorizedAfterHook): - self.assertRaises(Unauthorized, - self.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"') - self.assertTrue(self.execute('Any X WHERE X is CWGroup, X name "toto"')) - with self.assertRaises(QueryError) as cm: - self.commit() - self.assertEqual(str(cm.exception), 'transaction must be rolled back') - self.rollback() - self.assertFalse(self.execute('Any X WHERE X is CWGroup, X name "toto"')) + + with self.admin_access.repo_cnx() as cnx: + with self.temporary_appobjects(UnauthorizedAfterHook): + self.assertRaises(Unauthorized, + cnx.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"') + self.assertTrue(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) + with self.assertRaises(QueryError) as cm: + cnx.commit() + self.assertEqual(str(cm.exception), 'transaction must be rolled back') + cnx.rollback() + self.assertFalse(cnx.execute('Any X WHERE X is CWGroup, X name "toto"')) def test_close(self): @@ -234,7 +236,6 @@ cnxid = repo.connect(self.admlogin, password=self.admpassword) # rollback state change which trigger TrInfo insertion session = repo._get_session(cnxid) - session.set_cnxset() user = session.user user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': user.eid}) @@ -244,9 +245,6 @@ self.assertEqual(len(rset), 0) repo.close(cnxid) - def test_transaction_interleaved(self): - self.skipTest('implement me') - def test_close_kill_processing_request(self): repo = self.repo cnxid = repo.connect(self.admlogin, password=self.admpassword) @@ -424,12 +422,8 @@ cnxid = repo.connect(self.admlogin, password=self.admpassword) session = repo._get_session(cnxid, setcnxset=True) self.assertEqual(repo.type_and_source_from_eid(2, session), - ('CWGroup', 'system', None, 'system')) + ('CWGroup', None, 'system')) self.assertEqual(repo.type_from_eid(2, session), 'CWGroup') - self.assertEqual(repo.source_from_eid(2, session).uri, 'system') - self.assertEqual(repo.eid2extid(repo.system_source, 2, session), None) - class dummysource: uri = 'toto' - self.assertRaises(UnknownEid, repo.eid2extid, dummysource, 2, session) repo.close(cnxid) def test_public_api(self): @@ -445,7 +439,9 @@ repo = self.repo cnxid = repo.connect(self.admlogin, password=self.admpassword) self.assertEqual(repo.user_info(cnxid), (6, 'admin', set([u'managers']), {})) - self.assertEqual(repo.describe(cnxid, 2), (u'CWGroup', u'system', None, 'system')) + self.assertEqual({'type': u'CWGroup', 'extid': None, 'source': 'system'}, + 
+                         repo.entity_metas(cnxid, 2))
+        self.assertEqual(repo.describe(cnxid, 2), (u'CWGroup', 'system', None, 'system'))
         repo.close(cnxid)
         self.assertRaises(BadConnectionId, repo.user_info, cnxid)
         self.assertRaises(BadConnectionId, repo.describe, cnxid, 1)
@@ -464,8 +460,9 @@
         self.assertRaises(BadConnectionId, repo.get_shared_data, cnxid, 'data')

     def test_schema_is_relation(self):
-        no_is_rset = self.execute('Any X WHERE NOT X is ET')
-        self.assertFalse(no_is_rset, no_is_rset.description)
+        with self.admin_access.repo_cnx() as cnx:
+            no_is_rset = cnx.execute('Any X WHERE NOT X is ET')
+            self.assertFalse(no_is_rset, no_is_rset.description)

 #    def test_perfo(self):
 #        self.set_debug(True)
@@ -478,28 +475,29 @@
 #        print 'test time: %.3f (time) %.3f (cpu)' % ((time() - t), clock() - c)

     def test_delete_if_singlecard1(self):
-        note = self.request().create_entity('Affaire')
-        p1 = self.request().create_entity('Personne', nom=u'toto')
-        self.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s',
-                     {'x': note.eid, 'p': p1.eid})
-        rset = self.execute('Any P WHERE A todo_by P, A eid %(x)s',
-                            {'x': note.eid})
-        self.assertEqual(len(rset), 1)
-        p2 = self.request().create_entity('Personne', nom=u'tutu')
-        self.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s',
-                     {'x': note.eid, 'p': p2.eid})
-        rset = self.execute('Any P WHERE A todo_by P, A eid %(x)s',
-                            {'x': note.eid})
-        self.assertEqual(len(rset), 1)
-        self.assertEqual(rset.rows[0][0], p2.eid)
+        with self.admin_access.repo_cnx() as cnx:
+            note = cnx.create_entity('Affaire')
+            p1 = cnx.create_entity('Personne', nom=u'toto')
+            cnx.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s',
+                        {'x': note.eid, 'p': p1.eid})
+            rset = cnx.execute('Any P WHERE A todo_by P, A eid %(x)s',
+                               {'x': note.eid})
+            self.assertEqual(len(rset), 1)
+            p2 = cnx.create_entity('Personne', nom=u'tutu')
+            cnx.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s',
+                        {'x': note.eid, 'p': p2.eid})
+            rset = cnx.execute('Any P WHERE A todo_by P, A eid %(x)s',
+                               {'x': note.eid})
+            self.assertEqual(len(rset), 1)
+            self.assertEqual(rset.rows[0][0], p2.eid)

     def test_delete_if_object_inlined_singlecard(self):
-        req = self.request()
-        c = req.create_entity('Card', title=u'Carte')
-        req.create_entity('Personne', nom=u'Vincent', fiche=c)
-        req.create_entity('Personne', nom=u'Florent', fiche=c)
-        self.commit()
-        self.assertEqual(len(c.reverse_fiche), 1)
+        with self.admin_access.repo_cnx() as cnx:
+            c = cnx.create_entity('Card', title=u'Carte')
+            cnx.create_entity('Personne', nom=u'Vincent', fiche=c)
+            cnx.create_entity('Personne', nom=u'Florent', fiche=c)
+            cnx.commit()
+            self.assertEqual(len(c.reverse_fiche), 1)

     def test_cw_set_in_before_update(self):
         # local hook
@@ -514,13 +512,14 @@
                 if self.entity.eid not in pendings:
                     pendings.add(self.entity.eid)
                     self.entity.cw_set(alias=u'foo')
-        with self.temporary_appobjects(DummyBeforeHook):
-            req = self.request()
-            addr = req.create_entity('EmailAddress', address=u'a@b.fr')
-            addr.cw_set(address=u'a@b.com')
-            rset = self.execute('Any A,AA WHERE X eid %(x)s, X address A, X alias AA',
-                                {'x': addr.eid})
-            self.assertEqual(rset.rows, [[u'a@b.com', u'foo']])
+
+        with self.admin_access.repo_cnx() as cnx:
+            with self.temporary_appobjects(DummyBeforeHook):
+                addr = cnx.create_entity('EmailAddress', address=u'a@b.fr')
+                addr.cw_set(address=u'a@b.com')
+                rset = cnx.execute('Any A,AA WHERE X eid %(x)s, X address A, X alias AA',
+                                   {'x': addr.eid})
+                self.assertEqual(rset.rows, [[u'a@b.com', u'foo']])

     def test_cw_set_in_before_add(self):
         # local hook
@@ -531,11 +530,12 @@
             def __call__(self):
                 # cw_set is forbidden within before_add_entity()
                 self.entity.cw_set(alias=u'foo')
-        with self.temporary_appobjects(DummyBeforeHook):
-            req = self.request()
-            # XXX will fail with python -O
-            self.assertRaises(AssertionError, req.create_entity,
-                              'EmailAddress', address=u'a@b.fr')
+
+        with self.admin_access.repo_cnx() as cnx:
+            with self.temporary_appobjects(DummyBeforeHook):
+                # XXX will fail with python -O
+                self.assertRaises(AssertionError, cnx.create_entity,
+                                  'EmailAddress', address=u'a@b.fr')

     def test_multiple_edit_cw_set(self):
         """make sure cw_edited doesn't get cluttered
@@ -552,11 +552,12 @@
                 self._test.assertFalse('invoiced' in self.entity.cw_edited,
                                        'cw_edited cluttered by previous update')
                 self.entity.cw_edited['invoiced'] = 10
-        with self.temporary_appobjects(DummyBeforeHook):
-            req = self.request()
-            req.create_entity('Affaire', ref=u'AFF01')
-            req.create_entity('Affaire', ref=u'AFF02')
-            req.execute('SET A duration 10 WHERE A is Affaire')
+
+        with self.admin_access.repo_cnx() as cnx:
+            with self.temporary_appobjects(DummyBeforeHook):
+                cnx.create_entity('Affaire', ref=u'AFF01')
+                cnx.create_entity('Affaire', ref=u'AFF02')
+                cnx.execute('SET A duration 10 WHERE A is Affaire')

     def test_user_friendly_error(self):
@@ -566,20 +567,20 @@
             def raise_user_exception(self):
                 raise ValidationError(self.entity.eid, {'hip': 'hop'})
-        with self.temporary_appobjects(MyIUserFriendlyUniqueTogether):
-            req = self.request()
-            s = req.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013')
-            self.commit()
-            with self.assertRaises(ValidationError) as cm:
-                req.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013')
-            self.assertEqual(cm.exception.errors, {'hip': 'hop'})
-            self.rollback()
-            req.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'31400')
-            with self.assertRaises(ValidationError) as cm:
-                s.cw_set(cp=u'31400')
-            self.assertEqual(cm.exception.entity, s.eid)
-            self.assertEqual(cm.exception.errors, {'hip': 'hop'})
-            self.rollback()
+        with self.admin_access.repo_cnx() as cnx:
+            with self.temporary_appobjects(MyIUserFriendlyUniqueTogether):
+                s = cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013')
+                cnx.commit()
+                with self.assertRaises(ValidationError) as cm:
+                    cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013')
+                self.assertEqual(cm.exception.errors, {'hip': 'hop'})
+                cnx.rollback()
+                cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'31400')
+                with self.assertRaises(ValidationError) as cm:
+                    s.cw_set(cp=u'31400')
+                self.assertEqual(cm.exception.entity, s.eid)
+                self.assertEqual(cm.exception.errors, {'hip': 'hop'})
+                cnx.rollback()


 class SchemaDeserialTC(CubicWebTC):
@@ -617,35 +618,39 @@
             table = SQL_PREFIX + 'CWEType'
             namecol = SQL_PREFIX + 'name'
             finalcol = SQL_PREFIX + 'final'
-            self.session.set_cnxset()
-            cu = self.session.system_sql('SELECT %s FROM %s WHERE %s is NULL' % (
-                namecol, table, finalcol))
-            self.assertEqual(cu.fetchall(), [])
-            cu = self.session.system_sql('SELECT %s FROM %s WHERE %s=%%(final)s ORDER BY %s'
-                                         % (namecol, table, finalcol, namecol), {'final': True})
-            self.assertEqual(cu.fetchall(), [(u'BabarTestType',),
-                                             (u'BigInt',), (u'Boolean',), (u'Bytes',),
-                                             (u'Date',), (u'Datetime',),
-                                             (u'Decimal',),(u'Float',),
-                                             (u'Int',),
-                                             (u'Interval',), (u'Password',),
-                                             (u'String',),
-                                             (u'TZDatetime',), (u'TZTime',), (u'Time',)])
-            sql = ("SELECT etype.cw_eid, etype.cw_name, cstr.cw_eid, rel.eid_to "
-                   "FROM cw_CWUniqueTogetherConstraint as cstr, "
-                   "     relations_relation as rel, "
-                   "     cw_CWEType as etype "
-                   "WHERE cstr.cw_eid = rel.eid_from "
-                   "  AND cstr.cw_constraint_of = etype.cw_eid "
-                   "  AND etype.cw_name = 'Personne' "
-                   ";")
-            cu = self.session.system_sql(sql)
-            rows = cu.fetchall()
-            self.assertEqual(len(rows), 3)
-            person = self.repo.schema.eschema('Personne')
-            self.assertEqual(len(person._unique_together), 1)
-            self.assertItemsEqual(person._unique_together[0],
-                                  ('nom', 'prenom', 'inline2'))
+            with self.admin_access.repo_cnx() as cnx:
+                with cnx.ensure_cnx_set:
+                    cu = cnx.system_sql('SELECT %s FROM %s WHERE %s is NULL'
+                                        % (namecol, table, finalcol))
+                    self.assertEqual(cu.fetchall(), [])
+                    cu = cnx.system_sql('SELECT %s FROM %s '
+                                        'WHERE %s=%%(final)s ORDER BY %s'
+                                        % (namecol, table, finalcol, namecol),
+                                        {'final': True})
+                    self.assertEqual(cu.fetchall(),
+                                     [(u'BabarTestType',),
+                                      (u'BigInt',), (u'Boolean',), (u'Bytes',),
+                                      (u'Date',), (u'Datetime',),
+                                      (u'Decimal',),(u'Float',),
+                                      (u'Int',),
+                                      (u'Interval',), (u'Password',),
+                                      (u'String',),
+                                      (u'TZDatetime',), (u'TZTime',), (u'Time',)])
+                    sql = ("SELECT etype.cw_eid, etype.cw_name, cstr.cw_eid, rel.eid_to "
+                           "FROM cw_CWUniqueTogetherConstraint as cstr, "
+                           "     relations_relation as rel, "
+                           "     cw_CWEType as etype "
+                           "WHERE cstr.cw_eid = rel.eid_from "
+                           "  AND cstr.cw_constraint_of = etype.cw_eid "
+                           "  AND etype.cw_name = 'Personne' "
+                           ";")
+                    cu = cnx.system_sql(sql)
+                    rows = cu.fetchall()
+                    self.assertEqual(len(rows), 3)
+                    person = self.repo.schema.eschema('Personne')
+                    self.assertEqual(len(person._unique_together), 1)
+                    self.assertItemsEqual(person._unique_together[0],
+                                          ('nom', 'prenom', 'inline2'))
         finally:
             self.repo.set_schema(origshema)
@@ -666,125 +671,90 @@

 class DataHelpersTC(CubicWebTC):

-    def test_create_eid(self):
-        self.session.set_cnxset()
-        self.assert_(self.repo.system_source.create_eid(self.session))
-
-    def test_source_from_eid(self):
-        self.session.set_cnxset()
-        self.assertEqual(self.repo.source_from_eid(1, self.session),
-                         self.repo.sources_by_uri['system'])
-
-    def test_source_from_eid_raise(self):
-        self.session.set_cnxset()
-        self.assertRaises(UnknownEid, self.repo.source_from_eid, -2, self.session)
-
     def test_type_from_eid(self):
-        self.session.set_cnxset()
-        self.assertEqual(self.repo.type_from_eid(2, self.session), 'CWGroup')
+        with self.admin_access.repo_cnx() as cnx:
+            with cnx.ensure_cnx_set:
+                self.assertEqual(self.repo.type_from_eid(2, cnx), 'CWGroup')

     def test_type_from_eid_raise(self):
-        self.session.set_cnxset()
-        self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, self.session)
+        with self.admin_access.repo_cnx() as cnx:
+            with cnx.ensure_cnx_set:
+                self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, cnx)

     def test_add_delete_info(self):
-        entity = self.repo.vreg['etypes'].etype_class('Personne')(self.session)
-        entity.eid = -1
-        entity.complete = lambda x: None
-        self.session.set_cnxset()
-        self.repo.add_info(self.session, entity, self.repo.system_source)
-        cu = self.session.system_sql('SELECT * FROM entities WHERE eid = -1')
-        data = cu.fetchall()
-        self.assertIsInstance(data[0][4], datetime)
-        data[0] = list(data[0])
-        data[0][4] = None
-        self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', 'system',
-                                          None, None)])
-        self.repo.delete_info(self.session, entity, 'system', None)
-        #self.repo.commit()
-        cu = self.session.system_sql('SELECT * FROM entities WHERE eid = -1')
-        data = cu.fetchall()
-        self.assertEqual(data, [])
+        with self.admin_access.repo_cnx() as cnx:
+            with cnx.ensure_cnx_set:
+                cnx.mode = 'write'
+                entity = self.repo.vreg['etypes'].etype_class('Personne')(cnx)
+                entity.eid = -1
+                entity.complete = lambda x: None
+                self.repo.add_info(cnx, entity, self.repo.system_source)
+                cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1')
+                data = cu.fetchall()
+                self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', None)])
+                self.repo.delete_info(cnx, entity, 'system')
+                #self.repo.commit()
+                cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1')
+                data = cu.fetchall()
+                self.assertEqual(data, [])


 class FTITC(CubicWebTC):

-    def test_reindex_and_modified_since(self):
-        self.repo.system_source.multisources_etypes.add('Personne')
-        eidp = self.execute('INSERT Personne X: X nom "toto", X prenom "tutu"')[0][0]
-        self.commit()
-        ts = datetime.now()
-        self.assertEqual(len(self.execute('Personne X WHERE X has_text "tutu"')), 1)
-        self.session.set_cnxset()
-        cu = self.session.system_sql('SELECT mtime, eid FROM entities WHERE eid = %s' % eidp)
-        omtime = cu.fetchone()[0]
-        # our sqlite datetime adapter is ignore seconds fraction, so we have to
-        # ensure update is done the next seconds
-        time.sleep(1 - (ts.second - int(ts.second)))
-        self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp})
-        self.commit()
-        self.assertEqual(len(self.execute('Personne X WHERE X has_text "tutu"')), 1)
-        self.session.set_cnxset()
-        cu = self.session.system_sql('SELECT mtime FROM entities WHERE eid = %s' % eidp)
-        mtime = cu.fetchone()[0]
-        self.assertTrue(omtime < mtime)
-        self.commit()
-        date, modified, deleted = self.repo.entities_modified_since(('Personne',), omtime)
-        self.assertEqual(modified, [('Personne', eidp)])
-        self.assertEqual(deleted, [])
-        date, modified, deleted = self.repo.entities_modified_since(('Personne',), mtime)
-        self.assertEqual(modified, [])
-        self.assertEqual(deleted, [])
-        self.execute('DELETE Personne X WHERE X eid %(x)s', {'x': eidp})
-        self.commit()
-        date, modified, deleted = self.repo.entities_modified_since(('Personne',), omtime)
-        self.assertEqual(modified, [])
-        self.assertEqual(deleted, [('Personne', eidp)])
-
     def test_fulltext_container_entity(self):
-        assert self.schema.rschema('use_email').fulltext_container == 'subject'
-        req = self.request()
-        toto = req.create_entity('EmailAddress', address=u'toto@logilab.fr')
-        self.commit()
-        rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
-        self.assertEqual(rset.rows, [])
-        req.user.cw_set(use_email=toto)
-        self.commit()
-        rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
-        self.assertEqual(rset.rows, [[req.user.eid]])
-        req.execute('DELETE X use_email Y WHERE X login "admin", Y eid %(y)s',
-                    {'y': toto.eid})
-        self.commit()
-        rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
-        self.assertEqual(rset.rows, [])
-        tutu = req.create_entity('EmailAddress', address=u'tutu@logilab.fr')
-        req.user.cw_set(use_email=tutu)
-        self.commit()
-        rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'})
-        self.assertEqual(rset.rows, [[req.user.eid]])
-        tutu.cw_set(address=u'hip@logilab.fr')
-        self.commit()
-        rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'})
-        self.assertEqual(rset.rows, [])
-        rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'hip'})
-        self.assertEqual(rset.rows, [[req.user.eid]])
+        with self.admin_access.repo_cnx() as cnx:
+            assert self.schema.rschema('use_email').fulltext_container == 'subject'
+            toto = cnx.create_entity('EmailAddress', address=u'toto@logilab.fr')
+            cnx.commit()
+            rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
+            self.assertEqual(rset.rows, [])
+            cnx.user.cw_set(use_email=toto)
+            cnx.commit()
+            rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
+            self.assertEqual(rset.rows, [[cnx.user.eid]])
+            cnx.execute('DELETE X use_email Y WHERE X login "admin", Y eid %(y)s',
+                        {'y': toto.eid})
+            cnx.commit()
+            rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
+            self.assertEqual(rset.rows, [])
+            tutu = cnx.create_entity('EmailAddress', address=u'tutu@logilab.fr')
+            cnx.user.cw_set(use_email=tutu)
+            cnx.commit()
+            rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'})
+            self.assertEqual(rset.rows, [[cnx.user.eid]])
+            tutu.cw_set(address=u'hip@logilab.fr')
+            cnx.commit()
+            rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'})
+            self.assertEqual(rset.rows, [])
+            rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'hip'})
+            self.assertEqual(rset.rows, [[cnx.user.eid]])

     def test_no_uncessary_ftiindex_op(self):
-        req = self.request()
-        req.create_entity('Workflow', name=u'dummy workflow', description=u'huuuuu')
-        self.assertFalse(any(x for x in self.session.pending_operations
-                             if isinstance(x, native.FTIndexEntityOp)))
+        with self.admin_access.repo_cnx() as cnx:
+            cnx.create_entity('Workflow',
+                              name=u'dummy workflow',
+                              description=u'huuuuu')
+            self.assertFalse(any(x for x in cnx.pending_operations
+                                 if isinstance(x, native.FTIndexEntityOp)))


 class DBInitTC(CubicWebTC):

     def test_versions_inserted(self):
-        inserted = [r[0] for r in self.execute('Any K ORDERBY K WHERE P pkey K, P pkey ~= "system.version.%"')]
-        self.assertEqual(inserted,
-                         [u'system.version.basket', u'system.version.card', u'system.version.comment',
-                          u'system.version.cubicweb', u'system.version.email',
-                          u'system.version.file', u'system.version.folder',
-                          u'system.version.localperms', u'system.version.tag'])
+        with self.admin_access.repo_cnx() as cnx:
+            inserted = [r[0]
+                        for r in cnx.execute('Any K ORDERBY K '
+                                             'WHERE P pkey K, P pkey ~= "system.version.%"')]
+            self.assertEqual(inserted,
+                             [u'system.version.basket',
+                              u'system.version.card',
+                              u'system.version.comment',
+                              u'system.version.cubicweb',
+                              u'system.version.email',
+                              u'system.version.file',
+                              u'system.version.folder',
+                              u'system.version.localperms',
+                              u'system.version.tag'])


 CALLED = []
@@ -795,11 +765,9 @@
         CubicWebTC.setUp(self)
         CALLED[:] = ()

-    def _after_relation_hook(self, cnxset, fromeid, rtype, toeid):
-        self.called.append((fromeid, rtype, toeid))
-
     def test_inline_relation(self):
         """make sure _relation hooks are called for inlined relation"""
+
         class EcritParHook(hook.Hook):
             __regid__ = 'inlinedrelhook'
             __select__ = hook.Hook.__select__ & hook.match_rtype('ecrit_par')
@@ -809,47 +777,51 @@
                 CALLED.append((self.event, self.eidfrom, self.rtype, self.eidto))

         with self.temporary_appobjects(EcritParHook):
-            eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0]
-            eidn = self.execute('INSERT Note X: X type "T"')[0][0]
-            self.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"')
-            self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp),
-                                      ('after_add_relation', eidn, 'ecrit_par', eidp)])
-            CALLED[:] = ()
-            self.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"')
-            self.assertEqual(CALLED, [('before_delete_relation', eidn, 'ecrit_par', eidp),
-                                      ('after_delete_relation', eidn, 'ecrit_par', eidp)])
-            CALLED[:] = ()
-            eidn = self.execute('INSERT Note N: N ecrit_par P WHERE P nom "toto"')[0][0]
-            self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp),
-                                      ('after_add_relation', eidn, 'ecrit_par', eidp)])
+            with self.admin_access.repo_cnx() as cnx:
+                eidp = cnx.execute('INSERT Personne X: X nom "toto"')[0][0]
+                eidn = cnx.execute('INSERT Note X: X type "T"')[0][0]
+                cnx.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"')
+                self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp),
+                                          ('after_add_relation', eidn, 'ecrit_par', eidp)])
+                CALLED[:] = ()
+                cnx.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"')
+                self.assertEqual(CALLED, [('before_delete_relation', eidn, 'ecrit_par', eidp),
+                                          ('after_delete_relation', eidn, 'ecrit_par', eidp)])
+                CALLED[:] = ()
+                eidn = cnx.execute('INSERT Note N: N ecrit_par P WHERE P nom "toto"')[0][0]
+                self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp),
+                                          ('after_add_relation', eidn, 'ecrit_par', eidp)])

     def test_unique_contraint(self):
-        req = self.request()
-        toto = req.create_entity('Personne', nom=u'toto')
-        a01 = req.create_entity('Affaire', ref=u'A01', todo_by=toto)
-        req.cnx.commit()
-        req = self.request()
-        req.create_entity('Note', type=u'todo', inline1=a01)
-        req.cnx.commit()
-        req = self.request()
-        req.create_entity('Note', type=u'todo', inline1=a01)
-        with self.assertRaises(ValidationError) as cm:
-            req.cnx.commit()
-        self.assertEqual(cm.exception.errors, {'inline1-subject': u'RQLUniqueConstraint S type T, S inline1 A1, A1 todo_by C, Y type T, Y inline1 A2, A2 todo_by C failed'})
+        with self.admin_access.repo_cnx() as cnx:
+            toto = cnx.create_entity('Personne', nom=u'toto')
+            a01 = cnx.create_entity('Affaire', ref=u'A01', todo_by=toto)
+            cnx.commit()
+            cnx.create_entity('Note', type=u'todo', inline1=a01)
+            cnx.commit()
+            cnx.create_entity('Note', type=u'todo', inline1=a01)
+            with self.assertRaises(ValidationError) as cm:
+                cnx.commit()
+            self.assertEqual(cm.exception.errors,
                             {'inline1-subject': u'RQLUniqueConstraint S type T, S inline1 A1, '
+                              'A1 todo_by C, Y type T, Y inline1 A2, A2 todo_by C failed'})

     def test_add_relations_at_creation_with_del_existing_rel(self):
-        req = self.request()
-        person = req.create_entity('Personne', nom=u'Toto', prenom=u'Lanturlu', sexe=u'M')
-        users_rql = 'Any U WHERE U is CWGroup, U name "users"'
-        users = self.execute(users_rql).get_entity(0, 0)
-        req.create_entity('CWUser',
-                          login=u'Toto',
-                          upassword=u'firstname',
-                          firstname=u'firstname',
-                          surname=u'surname',
-                          reverse_login_user=person,
-                          in_group=users)
-        self.commit()
+        with self.admin_access.repo_cnx() as cnx:
+            person = cnx.create_entity('Personne',
+                                       nom=u'Toto',
+                                       prenom=u'Lanturlu',
+                                       sexe=u'M')
+            users_rql = 'Any U WHERE U is CWGroup, U name "users"'
+            users = cnx.execute(users_rql).get_entity(0, 0)
+            cnx.create_entity('CWUser',
+                              login=u'Toto',
+                              upassword=u'firstname',
+                              firstname=u'firstname',
+                              surname=u'surname',
+                              reverse_login_user=person,
+                              in_group=users)
+            cnx.commit()


 class PerformanceTest(CubicWebTC):
@@ -866,160 +838,161 @@
         logger.setLevel(logging.CRITICAL)

     def test_composite_deletion(self):
-        req = self.request()
-        personnes = []
-        t0 = time.time()
-        for i in xrange(2000):
-            p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
-            personnes.append(p)
-        abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
-        for j in xrange(0, 2000, 100):
-            abraham.cw_set(personne_composite=personnes[j:j+100])
-        t1 = time.time()
-        self.info('creation: %.2gs', (t1 - t0))
-        req.cnx.commit()
-        t2 = time.time()
-        self.info('commit creation: %.2gs', (t2 - t1))
-        self.execute('DELETE Personne P WHERE P eid %(eid)s', {'eid': abraham.eid})
-        t3 = time.time()
-        self.info('deletion: %.2gs', (t3 - t2))
-        req.cnx.commit()
-        t4 = time.time()
-        self.info("commit deletion: %2gs", (t4 - t3))
+        with self.admin_access.repo_cnx() as cnx:
+            personnes = []
+            t0 = time.time()
+            for i in xrange(2000):
+                p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+                personnes.append(p)
+            abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
+            for j in xrange(0, 2000, 100):
+                abraham.cw_set(personne_composite=personnes[j:j+100])
+            t1 = time.time()
+            self.info('creation: %.2gs', (t1 - t0))
+            cnx.commit()
+            t2 = time.time()
+            self.info('commit creation: %.2gs', (t2 - t1))
+            cnx.execute('DELETE Personne P WHERE P eid %(eid)s', {'eid': abraham.eid})
+            t3 = time.time()
+            self.info('deletion: %.2gs', (t3 - t2))
+            cnx.commit()
+            t4 = time.time()
+            self.info("commit deletion: %2gs", (t4 - t3))

     def test_add_relation_non_inlined(self):
-        req = self.request()
-        personnes = []
-        for i in xrange(2000):
-            p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
-            personnes.append(p)
-        req.cnx.commit()
-        t0 = time.time()
-        abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M',
-                                    personne_composite=personnes[:100])
-        t1 = time.time()
-        self.info('creation: %.2gs', (t1 - t0))
-        for j in xrange(100, 2000, 100):
-            abraham.cw_set(personne_composite=personnes[j:j+100])
-        t2 = time.time()
-        self.info('more relations: %.2gs', (t2-t1))
-        req.cnx.commit()
-        t3 = time.time()
-        self.info('commit creation: %.2gs', (t3 - t2))
+        with self.admin_access.repo_cnx() as cnx:
+            personnes = []
+            for i in xrange(2000):
+                p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+                personnes.append(p)
+            cnx.commit()
+            t0 = time.time()
+            abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M',
+                                        personne_composite=personnes[:100])
+            t1 = time.time()
+            self.info('creation: %.2gs', (t1 - t0))
+            for j in xrange(100, 2000, 100):
+                abraham.cw_set(personne_composite=personnes[j:j+100])
+            t2 = time.time()
+            self.info('more relations: %.2gs', (t2-t1))
+            cnx.commit()
+            t3 = time.time()
+            self.info('commit creation: %.2gs', (t3 - t2))

     def test_add_relation_inlined(self):
-        req = self.request()
-        personnes = []
-        for i in xrange(2000):
-            p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
-            personnes.append(p)
-        req.cnx.commit()
-        t0 = time.time()
-        abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M',
-                                    personne_inlined=personnes[:100])
-        t1 = time.time()
-        self.info('creation: %.2gs', (t1 - t0))
-        for j in xrange(100, 2000, 100):
-            abraham.cw_set(personne_inlined=personnes[j:j+100])
-        t2 = time.time()
-        self.info('more relations: %.2gs', (t2-t1))
-        req.cnx.commit()
-        t3 = time.time()
-        self.info('commit creation: %.2gs', (t3 - t2))
+        with self.admin_access.repo_cnx() as cnx:
+            personnes = []
+            for i in xrange(2000):
+                p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+                personnes.append(p)
+            cnx.commit()
+            t0 = time.time()
+            abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M',
+                                        personne_inlined=personnes[:100])
+            t1 = time.time()
+            self.info('creation: %.2gs', (t1 - t0))
+            for j in xrange(100, 2000, 100):
+                abraham.cw_set(personne_inlined=personnes[j:j+100])
+            t2 = time.time()
+            self.info('more relations: %.2gs', (t2-t1))
+            cnx.commit()
+            t3 = time.time()
+            self.info('commit creation: %.2gs', (t3 - t2))

     def test_session_add_relation(self):
         """ to be compared with test_session_add_relations"""
-        req = self.request()
-        personnes = []
-        for i in xrange(2000):
-            p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
-            personnes.append(p)
-        abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
-        req.cnx.commit()
-        t0 = time.time()
-        add_relation = self.session.add_relation
-        for p in personnes:
-            add_relation(abraham.eid, 'personne_composite', p.eid)
-        req.cnx.commit()
-        t1 = time.time()
-        self.info('add relation: %.2gs', t1-t0)
+        with self.admin_access.repo_cnx() as cnx:
+            personnes = []
+            for i in xrange(2000):
+                p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+                personnes.append(p)
+            abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
+            cnx.commit()
+            t0 = time.time()
+            add_relation = cnx.add_relation
+            for p in personnes:
+                add_relation(abraham.eid, 'personne_composite', p.eid)
+            cnx.commit()
+            t1 = time.time()
+            self.info('add relation: %.2gs', t1-t0)

     def test_session_add_relations (self):
         """ to be compared with test_session_add_relation"""
-        req = self.request()
-        personnes = []
-        for i in xrange(2000):
-            p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
-            personnes.append(p)
-        abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
-        req.cnx.commit()
-        t0 = time.time()
-        add_relations = self.session.add_relations
-        relations = [('personne_composite', [(abraham.eid, p.eid) for p in personnes])]
-        add_relations(relations)
-        req.cnx.commit()
-        t1 = time.time()
-        self.info('add relations: %.2gs', t1-t0)
+        with self.admin_access.repo_cnx() as cnx:
+            personnes = []
+            for i in xrange(2000):
+                p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+                personnes.append(p)
+            abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
+            cnx.commit()
+            t0 = time.time()
+            add_relations = cnx.add_relations
+            relations = [('personne_composite', [(abraham.eid, p.eid) for p in personnes])]
+            add_relations(relations)
+            cnx.commit()
+            t1 = time.time()
+            self.info('add relations: %.2gs', t1-t0)

     def test_session_add_relation_inlined(self):
         """ to be compared with test_session_add_relations"""
-        req = self.request()
-        personnes = []
-        for i in xrange(2000):
-            p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
-            personnes.append(p)
-        abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
-        req.cnx.commit()
-        t0 = time.time()
-        add_relation = self.session.add_relation
-        for p in personnes:
-            add_relation(abraham.eid, 'personne_inlined', p.eid)
-        req.cnx.commit()
-        t1 = time.time()
-        self.info('add relation (inlined): %.2gs', t1-t0)
+        with self.admin_access.repo_cnx() as cnx:
+            personnes = []
+            for i in xrange(2000):
+                p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+                personnes.append(p)
+            abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
+            cnx.commit()
+            t0 = time.time()
+            add_relation = cnx.add_relation
+            for p in personnes:
+                add_relation(abraham.eid, 'personne_inlined', p.eid)
+            cnx.commit()
+            t1 = time.time()
+            self.info('add relation (inlined): %.2gs', t1-t0)

     def test_session_add_relations_inlined (self):
         """ to be compared with test_session_add_relation"""
-        req = self.request()
-        personnes = []
-        for i in xrange(2000):
-            p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
-            personnes.append(p)
-        abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
-        req.cnx.commit()
-        t0 = time.time()
-        add_relations = self.session.add_relations
-        relations = [('personne_inlined', [(abraham.eid, p.eid) for p in personnes])]
-        add_relations(relations)
-        req.cnx.commit()
-        t1 = time.time()
-        self.info('add relations (inlined): %.2gs', t1-t0)
+        with self.admin_access.repo_cnx() as cnx:
+            personnes = []
+            for i in xrange(2000):
+                p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+                personnes.append(p)
+            abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
+            cnx.commit()
+            t0 = time.time()
+            add_relations = cnx.add_relations
+            relations = [('personne_inlined', [(abraham.eid, p.eid) for p in personnes])]
+            add_relations(relations)
+            cnx.commit()
+            t1 = time.time()
+            self.info('add relations (inlined): %.2gs', t1-t0)

     def test_optional_relation_reset_1(self):
-        req = self.request()
-        p1 = req.create_entity('Personne', nom=u'Vincent')
-        p2 = req.create_entity('Personne', nom=u'Florent')
-        w = req.create_entity('Affaire', ref=u'wc')
-        w.cw_set(todo_by=[p1,p2])
-        w.cw_clear_all_caches()
-        self.commit()
-        self.assertEqual(len(w.todo_by), 1)
-        self.assertEqual(w.todo_by[0].eid, p2.eid)
+        with self.admin_access.repo_cnx() as cnx:
+            p1 = cnx.create_entity('Personne', nom=u'Vincent')
+            p2 = cnx.create_entity('Personne', nom=u'Florent')
+            w = cnx.create_entity('Affaire', ref=u'wc')
+            w.cw_set(todo_by=[p1,p2])
+            w.cw_clear_all_caches()
+            cnx.commit()
+            self.assertEqual(len(w.todo_by), 1)
+            self.assertEqual(w.todo_by[0].eid, p2.eid)

     def test_optional_relation_reset_2(self):
-        req = self.request()
-        p1 = req.create_entity('Personne', nom=u'Vincent')
-        p2 = req.create_entity('Personne', nom=u'Florent')
-        w = req.create_entity('Affaire', ref=u'wc')
-        w.cw_set(todo_by=p1)
-        self.commit()
-        w.cw_set(todo_by=p2)
-        w.cw_clear_all_caches()
-        self.commit()
-        self.assertEqual(len(w.todo_by), 1)
-        self.assertEqual(w.todo_by[0].eid, p2.eid)
+        with self.admin_access.repo_cnx() as cnx:
+            p1 = cnx.create_entity('Personne', nom=u'Vincent')
+            p2 = cnx.create_entity('Personne', nom=u'Florent')
+            w = cnx.create_entity('Affaire', ref=u'wc')
+            w.cw_set(todo_by=p1)
+            cnx.commit()
+            w.cw_set(todo_by=p2)
+            w.cw_clear_all_caches()
+            cnx.commit()
+            self.assertEqual(len(w.todo_by), 1)
+            self.assertEqual(w.todo_by[0].eid, p2.eid)


 if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
     unittest_main()
diff -r 84738d495ffd -r 793377697c81 server/test/unittest_rqlannotation.py
--- a/server/test/unittest_rqlannotation.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/server/test/unittest_rqlannotation.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,5 +1,5 @@
 # -*- coding: iso-8859-1 -*-
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -21,340 +21,424 @@
 from cubicweb.devtools import TestServerConfiguration, get_test_db_handler
 from cubicweb.devtools.repotest import BaseQuerierTC

-
-def setUpModule(*args):
-    handler = get_test_db_handler(TestServerConfiguration(
-        'data2', apphome=SQLGenAnnotatorTC.datadir))
-    handler.build_db_cache()
-    global repo, cnx
-    repo, cnx = handler.get_repo_and_cnx()
-
-def tearDownModule(*args):
-    global repo, cnx
-    del repo, cnx
-
-
 class SQLGenAnnotatorTC(BaseQuerierTC):

     def setUp(self):
+        handler = get_test_db_handler(TestServerConfiguration(
+            'data2', apphome=SQLGenAnnotatorTC.datadir))
+        handler.build_db_cache()
+        repo, _cnx = handler.get_repo_and_cnx()
         self.__class__.repo = repo
         super(SQLGenAnnotatorTC, self).setUp()

     def get_max_eid(self):
         # no need for cleanup here
         return None
+
     def cleanup(self):
         # no need for cleanup here
         pass

     def test_0_1(self):
-        rqlst = self._prepare('Any SEN,RN,OEN WHERE X from_entity SE, SE eid 44, X relation_type R, R eid 139, X to_entity OE, OE eid 42, R name RN, SE name SEN, OE name OEN')
-        self.assertEqual(rqlst.defined_vars['SE']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['OE']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['R']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['SE'].stinfo['attrvar'], None)
-        self.assertEqual(rqlst.defined_vars['OE'].stinfo['attrvar'], None)
-        self.assertEqual(rqlst.defined_vars['R'].stinfo['attrvar'], None)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any SEN,RN,OEN WHERE X from_entity SE, '
+                                  'SE eid 44, X relation_type R, R eid 139, '
+                                  'X to_entity OE, OE eid 42, R name RN, SE name SEN, '
+                                  'OE name OEN')
+            self.assertEqual(rqlst.defined_vars['SE']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['OE']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['R']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['SE'].stinfo['attrvar'], None)
+            self.assertEqual(rqlst.defined_vars['OE'].stinfo['attrvar'], None)
+            self.assertEqual(rqlst.defined_vars['R'].stinfo['attrvar'], None)

     def test_0_2(self):
-        rqlst = self._prepare('Any O WHERE NOT S ecrit_par O, S eid 1, S inline1 P, O inline2 P')
-        self.assertEqual(rqlst.defined_vars['P']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['O'].stinfo['attrvar'], None)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any O WHERE NOT S ecrit_par O, S eid 1, '
+                                  'S inline1 P, O inline2 P')
+            self.assertEqual(rqlst.defined_vars['P']._q_invariant, True)
+            self.assertEqual(rqlst.defined_vars['O'].stinfo['attrvar'], None)

     def test_0_4(self):
-        rqlst = self._prepare('Any A,B,C WHERE A eid 12,A comment B, A ?wf_info_for C')
-        self.assertEqual(rqlst.defined_vars['A']._q_invariant, False)
-        self.assert_(rqlst.defined_vars['B'].stinfo['attrvar'])
-        self.assertEqual(rqlst.defined_vars['C']._q_invariant, False)
-        self.assertEqual(rqlst.solutions, [{'A': 'TrInfo', 'B': 'String', 'C': 'Affaire'},
-                                           {'A': 'TrInfo', 'B': 'String', 'C': 'CWUser'},
-                                           {'A': 'TrInfo', 'B': 'String', 'C': 'Note'}])
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any A,B,C WHERE A eid 12,A comment B, '
+                                  'A ?wf_info_for C')
+            self.assertEqual(rqlst.defined_vars['A']._q_invariant, False)
+            self.assert_(rqlst.defined_vars['B'].stinfo['attrvar'])
+            self.assertEqual(rqlst.defined_vars['C']._q_invariant, False)
+            self.assertEqual(rqlst.solutions, [{'A': 'TrInfo', 'B': 'String', 'C': 'Affaire'},
+                                               {'A': 'TrInfo', 'B': 'String', 'C': 'CWUser'},
+                                               {'A': 'TrInfo', 'B': 'String', 'C': 'Note'}])

     def test_0_5(self):
-        rqlst = self._prepare('Any P WHERE N ecrit_par P, N eid 0')
-        self.assertEqual(rqlst.defined_vars['N']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['P']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any P WHERE N ecrit_par P, N eid 0')
+            self.assertEqual(rqlst.defined_vars['N']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['P']._q_invariant, True)

     def test_0_6(self):
-        rqlst = self._prepare('Any P WHERE NOT N ecrit_par P, N eid 512')
-        self.assertEqual(rqlst.defined_vars['P']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any P WHERE NOT N ecrit_par P, N eid 512')
+            self.assertEqual(rqlst.defined_vars['P']._q_invariant, False)

     def test_0_7(self):
-        rqlst = self._prepare('Personne X,Y where X nom NX, Y nom NX, X eid XE, not Y eid XE')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
-        self.assert_(rqlst.defined_vars['XE'].stinfo['attrvar'])
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Personne X,Y where X nom NX, '
+                                  'Y nom NX, X eid XE, not Y eid XE')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
+            self.assert_(rqlst.defined_vars['XE'].stinfo['attrvar'])

     def test_0_8(self):
-        rqlst = self._prepare('Any P WHERE X eid 0, NOT X connait P')
-        self.assertEqual(rqlst.defined_vars['P']._q_invariant, False)
-        #self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
-        self.assertEqual(len(rqlst.solutions), 1, rqlst.solutions)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any P WHERE X eid 0, NOT X connait P')
+            self.assertEqual(rqlst.defined_vars['P']._q_invariant, False)
+            #self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+            self.assertEqual(len(rqlst.solutions), 1, rqlst.solutions)

     def test_0_10(self):
-        rqlst = self._prepare('Any X WHERE X concerne Y, Y is Note')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE X concerne Y, Y is Note')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)

     def test_0_11(self):
-        rqlst = self._prepare('Any X WHERE X todo_by Y, X is Affaire')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE X todo_by Y, X is Affaire')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)

     def test_0_12(self):
-        rqlst = self._prepare('Personne P WHERE P concerne A, A concerne S, S nom "Logilab"')
-        self.assertEqual(rqlst.defined_vars['P']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['A']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['S']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Personne P WHERE P concerne A, '
+                                  'A concerne S, S nom "Logilab"')
+            self.assertEqual(rqlst.defined_vars['P']._q_invariant, True)
+            self.assertEqual(rqlst.defined_vars['A']._q_invariant, True)
+            self.assertEqual(rqlst.defined_vars['S']._q_invariant, False)

     def test_1_0(self):
-        rqlst = self._prepare('Any X,Y WHERE X created_by Y, X eid 5, NOT Y eid 6')
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X,Y WHERE X created_by Y, '
+                                  'X eid 5, NOT Y eid 6')
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)

     def test_1_1(self):
-        rqlst = self._prepare('Any X,Y WHERE X created_by Y, X eid 5, NOT Y eid IN (6,7)')
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X,Y WHERE X created_by Y, X eid 5, '
+                                  'NOT Y eid IN (6,7)')
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)

     def test_2(self):
-        rqlst = self._prepare('Any X WHERE X identity Y, Y eid 1')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE X identity Y, Y eid 1')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)

     def test_7(self):
-        rqlst = self._prepare('Personne X,Y where X nom NX, Y nom NX, X eid XE, not Y eid XE')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Personne X,Y where X nom NX, Y nom NX, '
+                                  'X eid XE, not Y eid XE')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)

     def test_8(self):
-        # DISTINCT Any P WHERE P require_group %(g)s, NOT %(u)s has_group_permission P, P is CWPermission
-        rqlst = self._prepare('DISTINCT Any X WHERE A concerne X, NOT N migrated_from X, '
-                              'X is Note, N eid 1')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            # DISTINCT Any P WHERE P require_group %(g)s,
+            # NOT %(u)s has_group_permission P, P is CWPermission
+            rqlst = self._prepare(cnx, 'DISTINCT Any X WHERE A concerne X, '
+                                  'NOT N migrated_from X, '
+                                  'X is Note, N eid 1')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)

     def test_diff_scope_identity_deamb(self):
-        rqlst = self._prepare('Any X WHERE X concerne Y, Y is Note, EXISTS(Y identity Z, Z migrated_from N)')
-        self.assertEqual(rqlst.defined_vars['Z']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE X concerne Y, Y is Note, '
+                                  'EXISTS(Y identity Z, Z migrated_from N)')
+            self.assertEqual(rqlst.defined_vars['Z']._q_invariant, True)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)

     def test_optional_inlined(self):
-        rqlst = self._prepare('Any X,S where X from_state S?')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['S']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X,S where X from_state S?')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['S']._q_invariant, True)

     def test_optional_inlined_2(self):
-        rqlst = self._prepare('Any N,A WHERE N? inline1 A')
-        self.assertEqual(rqlst.defined_vars['N']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['A']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any N,A WHERE N? inline1 A')
+            self.assertEqual(rqlst.defined_vars['N']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['A']._q_invariant, False)

     def test_optional_1(self):
-        rqlst = self._prepare('Any X,S WHERE X travaille S?')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['S']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X,S WHERE X travaille S?')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['S']._q_invariant, True)

     def test_greater_eid(self):
-        rqlst = self._prepare('Any X WHERE X eid > 5')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE X eid > 5')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)

     def test_greater_eid_typed(self):
-        rqlst = self._prepare('Any X WHERE X eid > 5, X is Note')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE X eid > 5, X is Note')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)

     def test_max_eid(self):
-        rqlst = self._prepare('Any MAX(X)')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any MAX(X)')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)

     def test_max_eid_typed(self):
-        rqlst = self._prepare('Any MAX(X) WHERE X is Note')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any MAX(X) WHERE X is Note')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)

     def test_all_entities(self):
-        rqlst = self._prepare('Any X')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)

     def test_all_typed_entity(self):
-        rqlst = self._prepare('Any X WHERE X is Note')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE X is Note')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)

     def test_has_text_1(self):
-        rqlst = self._prepare('Any X WHERE X has_text "toto tata"')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'has_text')
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE X has_text "toto tata"')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+            self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type,
+                             'has_text')

     def test_has_text_2(self):
-        rqlst = self._prepare('Any X WHERE X is Personne, X has_text "coucou"')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'has_text')
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE X is Personne, '
+                                  'X has_text "coucou"')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+            self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type,
+                             'has_text')

     def test_not_relation_1(self):
-        # P can't be invariant since deambiguification caused by "NOT X require_permission P"
-        # is not considered by generated sql (NOT EXISTS(...))
-        rqlst = self._prepare('Any P,G WHERE P require_group G, NOT X require_permission P')
-        self.assertEqual(rqlst.defined_vars['P']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['G']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            # P can't be invariant since deambiguification caused by "NOT X require_permission P"
+            # is not considered by generated sql (NOT EXISTS(...))
+            rqlst = self._prepare(cnx, 'Any P,G WHERE P require_group G, '
+                                  'NOT X require_permission P')
+            self.assertEqual(rqlst.defined_vars['P']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['G']._q_invariant, True)
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)

     def test_not_relation_2(self):
-        rqlst = self._prepare('TrInfo X WHERE X eid 2, NOT X from_state Y, Y is State')
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'TrInfo X WHERE X eid 2, '
+                                  'NOT X from_state Y, Y is State')
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)

     def test_not_relation_3(self):
-        rqlst = self._prepare('Any X, Y WHERE X eid 1, Y eid in (2, 3)')
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X, Y WHERE X eid 1, Y eid in (2, 3)')
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)

     def test_not_relation_4_1(self):
-        rqlst = self._prepare('Note X WHERE NOT Y evaluee X')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Note X WHERE NOT Y evaluee X')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)

     def test_not_relation_4_2(self):
-        rqlst = self._prepare('Any X WHERE NOT Y evaluee X')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE NOT Y evaluee X')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)

     def test_not_relation_4_3(self):
-        rqlst = self._prepare('Any Y WHERE NOT Y evaluee X')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any Y WHERE NOT Y evaluee X')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)

     def test_not_relation_4_4(self):
-        rqlst = self._prepare('Any X WHERE NOT Y evaluee X, Y is CWUser')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE NOT Y evaluee X, Y is CWUser')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)

     def test_not_relation_4_5(self):
-        rqlst = self._prepare('Any X WHERE NOT Y evaluee X, Y eid %s, X is Note' % self.ueid)
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.solutions, [{'X': 'Note'}])
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE NOT Y evaluee X, '
+                                  'Y eid %s, X is Note' % self.ueid)
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.solutions, [{'X': 'Note'}])

     def test_not_relation_5_1(self):
-        rqlst = self._prepare('Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X,Y WHERE X name "CWGroup", '
+                                  'Y eid IN(1, 2, 3), NOT X read_permission Y')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)

     def test_not_relation_5_2(self):
-        rqlst = self._prepare('DISTINCT Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'DISTINCT Any X,Y WHERE X name "CWGroup", '
+                                  'Y eid IN(1, 2, 3), NOT X read_permission Y')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)

     def test_not_relation_6(self):
-        rqlst = self._prepare('Personne P where NOT P concerne A')
-        self.assertEqual(rqlst.defined_vars['P']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['A']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Personne P where NOT P concerne A')
+            self.assertEqual(rqlst.defined_vars['P']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['A']._q_invariant, True)

     def test_not_relation_7(self):
-        rqlst = self._prepare('Any K,V WHERE P is CWProperty, P pkey K, P value V, NOT P for_user U')
-        self.assertEqual(rqlst.defined_vars['P']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['U']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any K,V WHERE P is CWProperty, '
+                                  'P pkey K, P value V, NOT P for_user U')
+            self.assertEqual(rqlst.defined_vars['P']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['U']._q_invariant, True)

     def test_exists_1(self):
-        rqlst = self._prepare('Any U WHERE U eid IN (1,2), EXISTS(X owned_by U)')
-        self.assertEqual(rqlst.defined_vars['U']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any U WHERE U eid IN (1,2), EXISTS(X owned_by U)')
+            self.assertEqual(rqlst.defined_vars['U']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)

     def test_exists_2(self):
-        rqlst = self._prepare('Any U WHERE EXISTS(U eid IN (1,2), X owned_by U)')
-        self.assertEqual(rqlst.defined_vars['U']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any U WHERE EXISTS(U eid IN (1,2), X owned_by U)')
+            self.assertEqual(rqlst.defined_vars['U']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)

     def test_exists_3(self):
-        rqlst = self._prepare('Any U WHERE EXISTS(X owned_by U, X bookmarked_by U)')
-        self.assertEqual(rqlst.defined_vars['U']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any U WHERE EXISTS(X owned_by U, X bookmarked_by U)')
+            self.assertEqual(rqlst.defined_vars['U']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)

     def test_exists_4(self):
-        rqlst = self._prepare('Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), EXISTS(X read_permission Y)')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X,Y WHERE X name "CWGroup", '
+                                  'Y eid IN(1, 2, 3), EXISTS(X read_permission Y)')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)

     def test_exists_5(self):
-        rqlst = self._prepare('DISTINCT Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), EXISTS(X read_permission Y)')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'DISTINCT Any X,Y WHERE X name "CWGroup", '
+                                  'Y eid IN(1, 2, 3), EXISTS(X read_permission Y)')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)

     def test_not_exists_1(self):
-        rqlst = self._prepare('Any U WHERE NOT EXISTS(X owned_by U, X bookmarked_by U)')
-        self.assertEqual(rqlst.defined_vars['U']._q_invariant, False)
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any U WHERE NOT EXISTS(X owned_by U, '
+                                  'X bookmarked_by U)')
+            self.assertEqual(rqlst.defined_vars['U']._q_invariant, False)
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)

     def test_not_exists_2(self):
-        rqlst = self._prepare('Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)')
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X,Y WHERE X name "CWGroup", '
+                                  'Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)')
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)

     def test_not_exists_distinct_1(self):
-        rqlst = self._prepare('DISTINCT Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)')
-        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'DISTINCT Any X,Y WHERE X name "CWGroup", '
+                                  'Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)')
+            self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)

     def test_or_1(self):
-        rqlst = self._prepare('Any X WHERE X concerne B OR C concerne X, B eid 12, C eid 13')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE X concerne B OR '
+                                  'C concerne X, B eid 12, C eid 13')
+            self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)

     def test_or_2(self):
-        rqlst = self._prepare('Any X WHERE X created_by U, X concerne B OR C concerne X, B eid 12, C eid 13')
-        self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['U']._q_invariant, True)
-        self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'created_by')
+        with self.session.new_cnx() as cnx:
+            rqlst = self._prepare(cnx, 'Any X WHERE X created_by U, X concerne B OR '
+                                  'C
concerne X, B eid 12, C eid 13') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'created_by') def test_or_3(self): - rqlst = self._prepare('Any N WHERE A evaluee N or EXISTS(N todo_by U)') - self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['A']._q_invariant, True) - self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any N WHERE A evaluee N or EXISTS(N todo_by U)') + self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['A']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) def test_or_exists_1(self): - # query generated by security rewriting - rqlst = self._prepare('DISTINCT Any A,S WHERE A is Affaire, S nom "chouette", S is IN(Division, Societe, SubDivision),' - '(EXISTS(A owned_by D)) ' - 'OR ((((EXISTS(E concerne C?, C owned_by D, A identity E, C is Note, E is Affaire)) ' - 'OR (EXISTS(I concerne H?, H owned_by D, H is Societe, A identity I, I is Affaire))) ' - 'OR (EXISTS(J concerne G?, G owned_by D, G is SubDivision, A identity J, J is Affaire))) ' - 'OR (EXISTS(K concerne F?, F owned_by D, F is Division, A identity K, K is Affaire)))') - self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) + with self.session.new_cnx() as cnx: + # query generated by security rewriting + rqlst = self._prepare(cnx, 'DISTINCT Any A,S WHERE A is Affaire, S nom "chouette", ' + 'S is IN(Division, Societe, SubDivision),' + '(EXISTS(A owned_by D)) ' + 'OR ((((EXISTS(E concerne C?, C owned_by D, A identity E, ' + ' C is Note, E is Affaire)) ' + 'OR (EXISTS(I concerne H?, H owned_by D, H is Societe, ' + ' A identity I, I is Affaire))) ' + 'OR (EXISTS(J concerne G?, G owned_by D, G is SubDivision, ' + ' A identity J, J is Affaire))) ' + 'OR (EXISTS(K concerne F?, F owned_by D, F is Division, ' + ' A identity K, K is Affaire)))') + self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) def test_or_exists_2(self): - rqlst = self._prepare('Any U WHERE EXISTS(U in_group G, G name "managers") OR EXISTS(X owned_by U, X bookmarked_by U)') - self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['G']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any U WHERE EXISTS(U in_group G, G name "managers") OR ' + 'EXISTS(X owned_by U, X bookmarked_by U)') + self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['G']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) def test_or_exists_3(self): - rqlst = self._prepare('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' - 'WHERE C is Societe, S concerne C, C nom CS, ' - '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by N, N title "published"))') - self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) - rqlst = self._prepare('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' - 'WHERE S is Affaire, C is Societe, S concerne C, C nom CS, ' - '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by N, N title "published"))') - 
self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' + 'WHERE C is Societe, S concerne C, C nom CS, ' + '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by N, N title "published"))') + self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) + rqlst = self._prepare(cnx, 'Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' + 'WHERE S is Affaire, C is Societe, S concerne C, C nom CS, ' + '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by N, N title "published"))') + self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) def test_nonregr_ambiguity(self): - rqlst = self._prepare('Note N WHERE N attachment F') - # N may be an image as well, not invariant - self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['F']._q_invariant, True) + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Note N WHERE N attachment F') + # N may be an image as well, not invariant + self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['F']._q_invariant, True) def test_nonregr_ambiguity_2(self): - rqlst = self._prepare('Any S,SN WHERE X has_text "tot", X in_state S, S name SN, X is CWUser') - # X use has_text but should not be invariant as ambiguous, and has_text - # may not be its principal - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any S,SN WHERE X has_text "tot", X in_state S, S name SN, X is CWUser') + # X use has_text but should not be invariant as ambiguous, and has_text + # may not be its principal + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) def test_remove_from_deleted_source_1(self): - rqlst = self._prepare('Note X WHERE X eid 999998, NOT X cw_source Y') - self.assertFalse('X' in rqlst.defined_vars) # simplified - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Note X WHERE X eid 999998, NOT X cw_source Y') + self.assertNotIn('X', rqlst.defined_vars) # simplified + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) def test_remove_from_deleted_source_2(self): - rqlst = self._prepare('Note X WHERE X eid IN (999998, 999999), NOT X cw_source Y') - self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) - self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) - + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Note X WHERE X eid IN (999998, 999999), NOT X cw_source Y') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) def test_has_text_security_cache_bug(self): - rqlst = self._prepare('Any X WHERE X has_text "toto" WITH X BEING ' - '(Any C WHERE C is Societe, C nom CS)') - self.assertTrue(rqlst.parent.has_text_query) + with self.session.new_cnx() as cnx: + rqlst = self._prepare(cnx, 'Any X WHERE X has_text "toto" WITH X BEING ' + '(Any C WHERE C is Societe, C nom CS)') + self.assertTrue(rqlst.parent.has_text_query) if __name__ == '__main__': from logilab.common.testlib import unittest_main diff -r 84738d495ffd -r 793377697c81 server/test/unittest_schemaserial.py --- a/server/test/unittest_schemaserial.py Wed Sep 24 17:35:59 2014 +0200 +++ 
b/server/test/unittest_schemaserial.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -26,6 +26,10 @@ from cubicweb.schema import CubicWebSchemaLoader from cubicweb.devtools import TestServerConfiguration +from cubicweb.server.schemaserial import (updateeschema2rql, updaterschema2rql, rschema2rql, + eschema2rql, rdef2rql, specialize2rql, + _erperms2rql as erperms2rql) + from logilab.database import get_db_helper from yams import register_base_type, unregister_base_type @@ -53,9 +57,6 @@ helper.TYPE_MAPPING.pop('BabarTestType', None) helper.TYPE_CONVERTERS.pop('BabarTestType', None) -from cubicweb.server.schemaserial import * -from cubicweb.server.schemaserial import _erperms2rql as erperms2rql - cstrtypemap = {'RQLConstraint': 'RQLConstraint_eid', 'SizeConstraint': 'SizeConstraint_eid', 'StaticVocabularyConstraint': 'StaticVocabularyConstraint_eid', @@ -67,7 +68,9 @@ def test_eschema2rql1(self): self.assertListEqual([ ('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s', - {'description': u'define a final relation: link a final relation type from a non final entity to a final entity type. used to build the instance schema', + {'description': u'define a final relation: ' + 'link a final relation type from a non final entity ' + 'to a final entity type. used to build the instance schema', 'name': u'CWAttribute', 'final': False})], list(eschema2rql(schema.eschema('CWAttribute')))) @@ -82,15 +85,12 @@ self.assertListEqual([('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', {'et': None, 'x': None}), ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', - {'et': None, 'x': None}), - ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', - {'et': None, 'x': None}), - ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', {'et': None, 'x': None})], sorted(specialize2rql(schema))) def test_esche2rql_custom_type(self): - expected = [('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s', + expected = [('INSERT CWEType X: X description %(description)s,X final %(final)s,' + 'X name %(name)s', {'description': u'', 'name': u'BabarTestType', 'final': True},)] got = list(eschema2rql(schema.eschema('BabarTestType'))) @@ -98,69 +98,180 @@ def test_rschema2rql1(self): self.assertListEqual([ - ('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s', - {'description': u'link a relation definition to its relation type', 'symmetric': False, 'name': u'relation_type', 'final' : False, 'fulltext_container': None, 'inlined': True}), + ('INSERT CWRType X: X description %(description)s,X final %(final)s,' + 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' + 'X name %(name)s,X symmetric %(symmetric)s', + {'description': u'link a relation definition to its relation type', + 'symmetric': False, + 'name': u'relation_type', + 'final' : False, + 'fulltext_container': None, + 'inlined': True}), - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + ('INSERT CWRelation X: X cardinality 
%(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', {'se': None, 'rt': None, 'oe': None, - 'description': u'', 'composite': u'object', 'cardinality': u'1*', + 'description': u'', + 'composite': u'object', + 'cardinality': u'1*', 'ordernum': 1}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, 'ct': u'RQLConstraint_eid', 'value': u';O;O final TRUE\n'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, 'ct': u'RQLConstraint_eid', + 'value': u';O;O final TRUE\n'}), - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', {'se': None, 'rt': None, 'oe': None, - 'description': u'', 'composite': u'object', + 'description': u'', 'composite': u'object', 'ordernum': 1, 'cardinality': u'1*'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s', + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', {'x': None, 'ct': u'RQLConstraint_eid', 'value': u';O;O final FALSE\n'}), ], list(rschema2rql(schema.rschema('relation_type'), cstrtypemap))) def test_rschema2rql2(self): self.assertListEqual([ - ('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s', {'description': u'', 'symmetric': False, 'name': u'add_permission', 'final': False, 'fulltext_container': None, 'inlined': False}), + ('INSERT CWRType X: X description %(description)s,X final %(final)s,' + 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' + 'X name %(name)s,X symmetric %(symmetric)s', + {'description': u'', + 'symmetric': False, + 'name': u'add_permission', + 'final': False, + 'fulltext_container': None, + 'inlined': False}), - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, 'rt': None, 'oe': None, - 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'ordernum': 9999, 'cardinality': u'**'}), - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, 'rt': None, 'oe': None, - 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'ordernum': 9999, 'cardinality': u'*?'}), + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity 
SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'groups allowed to add entities/relations of this type', + 'composite': None, + 'ordernum': 9999, + 'cardinality': u'**'}), + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'rql expression allowing to add entities/relations of this type', + 'composite': 'subject', + 'ordernum': 9999, + 'cardinality': u'*?'}), - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, 'rt': None, 'oe': None, - 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'ordernum': 9999, 'cardinality': u'**'}), - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, 'rt': None, 'oe': None, - 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'ordernum': 9999, 'cardinality': u'*?'}), - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'cardinality': u'**', 'composite': None, 'description': u'groups allowed to add entities/relations of this type', - 'oe': None, 'ordernum': 9999, 'rt': None, 'se': None}), - ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'cardinality': u'*?', 'composite': u'subject', 'description': u'rql expression allowing to add entities/relations of this type', 'oe': None, 'ordernum': 9999, 'rt': None, 'se': None})], + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'groups allowed to add entities/relations of this type', + 'composite': None, + 'ordernum': 9999, + 'cardinality': u'**'}), + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'rql expression allowing to add entities/relations of this type', + 'composite': 'subject', + 'ordernum': 9999, + 'cardinality': u'*?'}), + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'cardinality': u'**', + 'composite': None, + 'description': 
u'groups allowed to add entities/relations of this type', + 'oe': None, + 'ordernum': 9999, + 'rt': None, + 'se': None}), + ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,' + 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,' + 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'cardinality': u'*?', + 'composite': u'subject', + 'description': u'rql expression allowing to add entities/relations of this type', + 'oe': None, + 'ordernum': 9999, + 'rt': None, + 'se': None})], list(rschema2rql(schema.rschema('add_permission'), cstrtypemap))) def test_rschema2rql3(self): self.assertListEqual([ - ('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s', - {'description': u'', 'symmetric': False, 'name': u'cardinality', 'final': True, 'fulltext_container': None, 'inlined': False}), + ('INSERT CWRType X: X description %(description)s,X final %(final)s,' + 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' + 'X name %(name)s,X symmetric %(symmetric)s', + {'description': u'', + 'symmetric': False, + 'name': u'cardinality', + 'final': True, + 'fulltext_container': None, + 'inlined': False}), - ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, 'rt': None, 'oe': None, - 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, 'ct': u'SizeConstraint_eid', 'value': u'max=2'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, 'ct': u'StaticVocabularyConstraint_eid', 'value': u"u'?1', u'11'"}), + ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,' + 'X description %(description)s,X fulltextindexed %(fulltextindexed)s,' + 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,' + 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,' + 'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'subject/object cardinality', + 'internationalizable': True, + 'fulltextindexed': False, + 'ordernum': 5, + 'defaultval': None, + 'indexed': False, + 'cardinality': u'?1'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'ct': u'SizeConstraint_eid', + 'value': u'max=2'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'ct': u'StaticVocabularyConstraint_eid', + 'value': u"u'?1', u'11'"}), - ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X 
from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, 'rt': None, 'oe': None, - 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, 'ct': u'SizeConstraint_eid', 'value': u'max=2'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, 'ct': u'StaticVocabularyConstraint_eid', 'value': u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'"})], + ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,' + 'X description %(description)s,X fulltextindexed %(fulltextindexed)s,' + 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,' + 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE ' + 'WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'subject/object cardinality', + 'internationalizable': True, + 'fulltextindexed': False, + 'ordernum': 5, + 'defaultval': None, + 'indexed': False, + 'cardinality': u'?1'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'ct': u'SizeConstraint_eid', + 'value': u'max=2'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'ct': u'StaticVocabularyConstraint_eid', + 'value': (u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', " + "u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'")})], list(rschema2rql(schema.rschema('cardinality'), cstrtypemap))) def test_rschema2rql_custom_type(self): @@ -184,7 +295,7 @@ 'extra_props': '{"jungle_speed": 42}', 'indexed': False, 'oe': None, - 'ordernum': 19, + 'ordernum': 4, 'rt': None, 'se': None})] @@ -200,41 +311,73 @@ def test_rdef2rql(self): self.assertListEqual([ - ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', - {'se': None, 'rt': None, 'oe': None, - 'description': u'', 'internationalizable': True, 'fulltextindexed': False, - 'ordernum': 3, 'defaultval': Binary.zpickle(u'text/plain'), 'indexed': False, 'cardinality': u'?1'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, 'value': u'None', 'ct': 'FormatConstraint_eid'}), - ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s', - {'x': None, 'value': u'max=50', 'ct': 'SizeConstraint_eid'})], - list(rdef2rql(schema['description_format'].rdefs[('CWRType', 'String')], cstrtypemap))) - + ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,' + 'X description %(description)s,X fulltextindexed %(fulltextindexed)s,' + 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,' + 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,' + 'X 
to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', + {'se': None, + 'rt': None, + 'oe': None, + 'description': u'', + 'internationalizable': True, + 'fulltextindexed': False, + 'ordernum': 3, + 'defaultval': Binary.zpickle(u'text/plain'), + 'indexed': False, + 'cardinality': u'?1'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'value': u'None', + 'ct': 'FormatConstraint_eid'}), + ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' + 'WHERE CT eid %(ct)s, EDEF eid %(x)s', + {'x': None, + 'value': u'max=50', + 'ct': 'SizeConstraint_eid'})], + list(rdef2rql(schema['description_format'].rdefs[('CWRType', 'String')], + cstrtypemap))) def test_updateeschema2rql1(self): - self.assertListEqual([('SET X description %(description)s,X final %(final)s,X name %(name)s WHERE X eid %(x)s', - {'description': u'define a final relation: link a final relation type from a non final entity to a final entity type. used to build the instance schema', 'x': 1, 'final': False, 'name': u'CWAttribute'})], + self.assertListEqual([('SET X description %(description)s,X final %(final)s,' + 'X name %(name)s WHERE X eid %(x)s', + {'description': u'define a final relation: link a final relation type from' + ' a non final entity to a final entity type. used to build the instance schema', + 'x': 1, 'final': False, 'name': u'CWAttribute'})], list(updateeschema2rql(schema.eschema('CWAttribute'), 1))) def test_updateeschema2rql2(self): - self.assertListEqual([('SET X description %(description)s,X final %(final)s,X name %(name)s WHERE X eid %(x)s', + self.assertListEqual([('SET X description %(description)s,X final %(final)s,' + 'X name %(name)s WHERE X eid %(x)s', {'description': u'', 'x': 1, 'final': True, 'name': u'String'})], list(updateeschema2rql(schema.eschema('String'), 1))) def test_updaterschema2rql1(self): self.assertListEqual([ - ('SET X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s', - {'x': 1, 'symmetric': False, + ('SET X description %(description)s,X final %(final)s,' + 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' + 'X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s', + {'x': 1, + 'symmetric': False, 'description': u'link a relation definition to its relation type', - 'final': False, 'fulltext_container': None, 'inlined': True, 'name': u'relation_type'})], + 'final': False, 'fulltext_container': None, + 'inlined': True, + 'name': u'relation_type'})], list(updaterschema2rql(schema.rschema('relation_type'), 1))) def test_updaterschema2rql2(self): expected = [ - ('SET X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s', - {'x': 1, 'symmetric': False, - 'description': u'', 'final': False, 'fulltext_container': None, - 'inlined': False, 'name': u'add_permission'}) + ('SET X description %(description)s,X final %(final)s,' + 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,' + 'X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s', + {'x': 1, + 'symmetric': False, + 'description': u'', + 'final': False, + 'fulltext_container': None, + 'inlined': False, + 'name': u'add_permission'}) ] for i, (rql, args) in enumerate(updaterschema2rql(schema.rschema('add_permission'), 1)): yield 
self.assertEqual, expected[i], (rql, args)
diff -r 84738d495ffd -r 793377697c81 server/test/unittest_security.py
--- a/server/test/unittest_security.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/server/test/unittest_security.py	Wed Sep 24 18:04:30 2014 +0200
@@ -17,11 +17,7 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """functional tests for server security"""

-import sys
-
-from logilab.common.testlib import unittest_main, TestCase
-
-from rql import RQLException
+from logilab.common.testlib import unittest_main

 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb import Unauthorized, ValidationError, QueryError, Binary
@@ -34,9 +30,10 @@

     def setup_database(self):
         super(BaseSecurityTC, self).setup_database()
-        self.create_user(self.request(), 'iaminusersgrouponly')
-        hash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt')
-        self.create_user(self.request(), 'oldpassword', password=Binary(hash))
+        with self.admin_access.client_cnx() as cnx:
+            self.create_user(cnx, 'iaminusersgrouponly')
+            hash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt')
+            self.create_user(cnx, 'oldpassword', password=Binary(hash))


 class LowLevelSecurityFunctionTC(BaseSecurityTC):
@@ -44,34 +41,40 @@
         rql = u'Personne U where U nom "managers"'
         rqlst = self.repo.vreg.rqlhelper.parse(rql).children[0]
         with self.temporary_permissions(Personne={'read': ('users', 'managers')}):
-            self.repo.vreg.solutions(self.session, rqlst, None)
-            solution = rqlst.solutions[0]
-            check_read_access(self.session, rqlst, solution, {})
-            with self.login('anon') as cu:
+            with self.admin_access.repo_cnx() as cnx:
+                self.repo.vreg.solutions(cnx, rqlst, None)
+                solution = rqlst.solutions[0]
+                check_read_access(cnx, rqlst, solution, {})
+            with self.new_access('anon').repo_cnx() as cnx:
                 self.assertRaises(Unauthorized, check_read_access,
-                                  self.session, rqlst, solution, {})
-                self.assertRaises(Unauthorized, cu.execute, rql)
+                                  cnx, rqlst, solution, {})
+                self.assertRaises(Unauthorized, cnx.execute, rql)

     def test_upassword_not_selectable(self):
-        self.assertRaises(Unauthorized,
-                          self.execute, 'Any X,P WHERE X is CWUser, X upassword P')
-        self.rollback()
-        with self.login('iaminusersgrouponly') as cu:
+        with self.admin_access.repo_cnx() as cnx:
             self.assertRaises(Unauthorized,
-                              cu.execute, 'Any X,P WHERE X is CWUser, X upassword P')
+                              cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P')
+        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            self.assertRaises(Unauthorized,
+                              cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P')

     def test_update_password(self):
-        """Ensure that if a user's password is stored with a deprecated hash, it will be updated on next login"""
-        oldhash = str(self.session.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE cw_login = 'oldpassword'").fetchone()[0])
-        with self.login('oldpassword') as cu:
-            pass
-        newhash = str(self.session.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE cw_login = 'oldpassword'").fetchone()[0])
-        self.assertNotEqual(oldhash, newhash)
-        self.assertTrue(newhash.startswith('$6$'))
-        with self.login('oldpassword') as cu:
-            pass
-        self.assertEqual(newhash, str(self.session.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE cw_login = 'oldpassword'").fetchone()[0]))
+        """Ensure that if a user's password is stored with a deprecated hash,
+        it will be updated on next login
+        """
+        with self.repo.internal_cnx() as cnx:
+            oldhash = str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser "
+                                         "WHERE cw_login = 'oldpassword'").fetchone()[0])
+            self.repo.close(self.repo.connect('oldpassword', password='oldpassword'))
+            newhash = str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser "
+                                         "WHERE cw_login = 'oldpassword'").fetchone()[0])
+            self.assertNotEqual(oldhash, newhash)
+            self.assertTrue(newhash.startswith('$6$'))
+            self.repo.close(self.repo.connect('oldpassword', password='oldpassword'))
+            self.assertEqual(newhash,
+                             str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE "
+                                                "cw_login = 'oldpassword'").fetchone()[0]))
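The `test_update_password` hunk above exercises CubicWeb's login-time hash upgrade: the fixture user's password is stored with the deprecated `des_crypt` scheme, and the repository re-hashes it with the context's preferred scheme (SHA-512 crypt, hence the `'$6$'` prefix) on the next successful connect. A standalone sketch of the same mechanism using passlib directly; the scheme list here is an assumption for illustration and is not necessarily how `_CRYPTO_CTX` is actually configured::

    from passlib.context import CryptContext

    # first scheme is the default; des_crypt is accepted but flagged stale
    ctx = CryptContext(schemes=['sha512_crypt', 'des_crypt'],
                       deprecated=['des_crypt'])

    stored = ctx.encrypt('oldpassword', scheme='des_crypt')
    # on login: verify, then re-hash if the stored scheme is deprecated
    if ctx.verify('oldpassword', stored) and ctx.needs_update(stored):
        stored = ctx.encrypt('oldpassword')
    assert stored.startswith('$6$')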

 class SecurityRewritingTC(BaseSecurityTC):
@@ -86,84 +89,88 @@
         super(SecurityRewritingTC, self).tearDown()

     def test_not_relation_read_security(self):
-        with self.login('iaminusersgrouponly'):
+        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
             self.hijack_source_execute()
-            self.execute('Any U WHERE NOT A todo_by U, A is Affaire')
+            cnx.execute('Any U WHERE NOT A todo_by U, A is Affaire')
             self.assertEqual(self.query[0][1].as_string(),
                              'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
-            self.execute('Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
+            cnx.execute('Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
             self.assertEqual(self.query[0][1].as_string(),
                              'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')

 class SecurityTC(BaseSecurityTC):

     def setUp(self):
-        BaseSecurityTC.setUp(self)
+        super(SecurityTC, self).setUp()
         # implicitly test that managers can add some entities
-        self.execute("INSERT Affaire X: X sujet 'cool'")
-        self.execute("INSERT Societe X: X nom 'logilab'")
-        self.execute("INSERT Personne X: X nom 'bidule'")
-        self.execute('INSERT CWGroup X: X name "staff"')
-        self.commit()
+        with self.admin_access.repo_cnx() as cnx:
+            cnx.execute("INSERT Affaire X: X sujet 'cool'")
+            cnx.execute("INSERT Societe X: X nom 'logilab'")
+            cnx.execute("INSERT Personne X: X nom 'bidule'")
+            cnx.execute('INSERT CWGroup X: X name "staff"')
+            cnx.commit()

     def test_insert_security(self):
-        with self.login('anon') as cu:
-            cu.execute("INSERT Personne X: X nom 'bidule'")
-            self.assertRaises(Unauthorized, self.commit)
-            self.assertEqual(cu.execute('Personne X').rowcount, 1)
+        with self.new_access('anon').repo_cnx() as cnx:
+            cnx.execute("INSERT Personne X: X nom 'bidule'")
+            self.assertRaises(Unauthorized, cnx.commit)
+            self.assertEqual(cnx.execute('Personne X').rowcount, 1)

     def test_insert_rql_permission(self):
        # test user can only add an Affaire related to a Societe he owns
-        with self.login('iaminusersgrouponly') as cu:
-            cu.execute("INSERT Affaire X: X sujet 'cool'")
-            self.assertRaises(Unauthorized, self.commit)
+        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            cnx.execute("INSERT Affaire X: X sujet 'cool'")
+            self.assertRaises(Unauthorized, cnx.commit)
         # test nothing has actually been inserted
-        self.assertEqual(self.execute('Affaire X').rowcount, 1)
-        with self.login('iaminusersgrouponly') as cu:
-            cu.execute("INSERT Affaire X: X sujet 'cool'")
-            cu.execute("INSERT Societe X: X nom 'chouette'")
-            cu.execute("SET A concerne S WHERE A sujet 'cool', S nom 'chouette'")
-            self.commit()
+        with self.admin_access.repo_cnx() as cnx:
+            self.assertEqual(cnx.execute('Affaire X').rowcount, 1)
+        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            cnx.execute("INSERT Affaire X: X sujet 'cool'")
+            cnx.execute("INSERT Societe X: X nom 'chouette'")
+            cnx.execute("SET A concerne S WHERE A sujet 'cool', S nom 'chouette'")
+            cnx.commit()
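A recurring pattern in these rewritten tests: an insertion guarded by an RQL permission expression is not rejected at `execute` time but at `commit` time, when the security hooks run, so the assertions target `cnx.commit` rather than `cnx.execute`. A condensed sketch, mirroring `test_insert_rql_permission` above::

    with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
        cnx.execute("INSERT Affaire X: X sujet 'cool'")
        # the RQL write permission is only checked when the transaction commits
        self.assertRaises(Unauthorized, cnx.commit)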
     def test_update_security_1(self):
-        with self.login('anon') as cu:
+        with self.new_access('anon').repo_cnx() as cnx:
             # local security check
-            cu.execute( "SET X nom 'bidulechouette' WHERE X is Personne")
-            self.assertRaises(Unauthorized, self.commit)
-            self.assertEqual(self.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0)
+            cnx.execute("SET X nom 'bidulechouette' WHERE X is Personne")
+            self.assertRaises(Unauthorized, cnx.commit)
+        with self.admin_access.repo_cnx() as cnx:
+            self.assertEqual(cnx.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0)

     def test_update_security_2(self):
         with self.temporary_permissions(Personne={'read': ('users', 'managers'),
                                                   'add': ('guests', 'users', 'managers')}):
-            with self.login('anon') as cu:
-                self.assertRaises(Unauthorized, cu.execute, "SET X nom 'bidulechouette' WHERE X is Personne")
-                self.rollback()
-                # self.assertRaises(Unauthorized, cnx.commit)
+            with self.new_access('anon').repo_cnx() as cnx:
+                self.assertRaises(Unauthorized, cnx.execute,
+                                  "SET X nom 'bidulechouette' WHERE X is Personne")
         # test nothing has actually been inserted
-        self.assertEqual(self.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0)
+        with self.admin_access.repo_cnx() as cnx:
+            self.assertEqual(cnx.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0)

     def test_update_security_3(self):
-        with self.login('iaminusersgrouponly') as cu:
-            cu.execute("INSERT Personne X: X nom 'biduuule'")
-            cu.execute("INSERT Societe X: X nom 'looogilab'")
-            cu.execute("SET X travaille S WHERE X nom 'biduuule', S nom 'looogilab'")
+        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            cnx.execute("INSERT Personne X: X nom 'biduuule'")
+            cnx.execute("INSERT Societe X: X nom 'looogilab'")
+            cnx.execute("SET X travaille S WHERE X nom 'biduuule', S nom 'looogilab'")

     def test_update_rql_permission(self):
-        self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
-        self.commit()
+        with self.admin_access.repo_cnx() as cnx:
+            cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+            cnx.commit()
         # test user can only update an Affaire related to a Societe he owns
-        with self.login('iaminusersgrouponly') as cu:
-            cu.execute("SET X sujet 'pascool' WHERE X is Affaire")
+        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            cnx.execute("SET X sujet 'pascool' WHERE X is Affaire")
             # this won't actually do anything since the selection query won't return anything
-            self.commit()
+            cnx.commit()
             # to actually get Unauthorized exception, try to update an entity we can read
-            cu.execute("SET X nom 'toto' WHERE X is Societe")
-            self.assertRaises(Unauthorized, self.commit)
-            cu.execute("INSERT Affaire X: X sujet 'pascool'")
-            cu.execute("INSERT Societe X: X nom 'chouette'")
-            cu.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
-            cu.execute("SET X sujet 'habahsicestcool' WHERE X sujet 'pascool'")
-            self.commit()
+            cnx.execute("SET X nom 'toto' WHERE X is Societe")
+            self.assertRaises(Unauthorized, cnx.commit)
+            cnx.execute("INSERT Affaire X: X sujet 'pascool'")
+            cnx.execute("INSERT Societe X: X nom 'chouette'")
+            cnx.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
+            cnx.execute("SET X sujet 'habahsicestcool' WHERE X sujet 'pascool'")
+            cnx.commit()

     def test_delete_security(self):
         # FIXME: sample below fails because we don't detect "owner" can't delete
@@ -173,199 +180,205 @@
         #self.assertRaises(Unauthorized,
         #                  self.o.execute, user, "DELETE CWUser X WHERE X login 'bidule'")
         # check local security
-        with self.login('iaminusersgrouponly') as cu:
-            self.assertRaises(Unauthorized, cu.execute, "DELETE CWGroup Y WHERE Y name 'staff'")
-            self.rollback()
+        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            self.assertRaises(Unauthorized, cnx.execute, "DELETE CWGroup Y WHERE Y name 'staff'")
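Note also what the removed `self.rollback()  # required after Unauthorized` lines were guarding: once an `Unauthorized` error has been raised, the transaction is doomed, so a subsequent `commit` raises `QueryError` until `rollback` is called. The rewritten `test_delete_rql_permission` below still exercises this; in sketch form::

    with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
        self.assertRaises(Unauthorized, cnx.execute, "DELETE Societe S")
        self.assertRaises(QueryError, cnx.commit)  # can't commit anymore
        cnx.rollback()  # the connection is usable again after rollback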
     def test_delete_rql_permission(self):
-        self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
-        self.commit()
+        with self.admin_access.repo_cnx() as cnx:
+            cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+            cnx.commit()
         # test user can only delete an Affaire related to a Societe he owns
-        with self.login('iaminusersgrouponly') as cu:
+        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
             # this won't actually do anything since the selection query won't return anything
-            cu.execute("DELETE Affaire X")
-            self.commit()
+            cnx.execute("DELETE Affaire X")
+            cnx.commit()
             # to actually get Unauthorized exception, try to delete an entity we can read
-            self.assertRaises(Unauthorized, cu.execute, "DELETE Societe S")
-            self.assertRaises(QueryError, self.commit) # can't commit anymore
-            self.rollback() # required after Unauthorized
-            cu.execute("INSERT Affaire X: X sujet 'pascool'")
-            cu.execute("INSERT Societe X: X nom 'chouette'")
-            cu.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
-            self.commit()
+            self.assertRaises(Unauthorized, cnx.execute, "DELETE Societe S")
+            self.assertRaises(QueryError, cnx.commit) # can't commit anymore
+            cnx.rollback()
+            cnx.execute("INSERT Affaire X: X sujet 'pascool'")
+            cnx.execute("INSERT Societe X: X nom 'chouette'")
+            cnx.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
+            cnx.commit()
 ##             # this one should fail since it will try to delete two affaires, one authorized
 ##             # and the other not
-##             self.assertRaises(Unauthorized, cu.execute, "DELETE Affaire X")
-            cu.execute("DELETE Affaire X WHERE X sujet 'pascool'")
-            self.commit()
-
+##             self.assertRaises(Unauthorized, cnx.execute, "DELETE Affaire X")
+            cnx.execute("DELETE Affaire X WHERE X sujet 'pascool'")
+            cnx.commit()

     def test_insert_relation_rql_permission(self):
-        with self.login('iaminusersgrouponly') as cu:
-            cu.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
             # should raise Unauthorized since the user doesn't own S, though this won't
             # actually do anything since the selection query won't return
             # anything
-            self.commit()
+            cnx.commit()
             # to actually get Unauthorized exception, try to insert a relation
             # where we can read both entities
-            rset = cu.execute('Personne P')
+            rset = cnx.execute('Personne P')
             self.assertEqual(len(rset), 1)
             ent = rset.get_entity(0, 0)
-            self.assertFalse(cu.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe'))
+            self.assertFalse(cnx.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe'))
             self.assertRaises(Unauthorized, ent.cw_check_perm, 'update')
             self.assertRaises(Unauthorized,
-                              cu.execute, "SET P travaille S WHERE P is Personne, S is Societe")
-            self.assertRaises(QueryError, self.commit) # can't commit anymore
-            self.rollback()
+                              cnx.execute, "SET P travaille S WHERE P is Personne, S is Societe")
+            self.assertRaises(QueryError, cnx.commit) # can't commit anymore
+            cnx.rollback()
             # test nothing has actually been inserted:
-            self.assertFalse(cu.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe'))
-            cu.execute("INSERT Societe X: X nom 'chouette'")
-            cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'")
-            self.commit()
+            self.assertFalse(cnx.execute('Any 
P,S WHERE P travaille S,P is Personne, S is Societe')) + cnx.execute("INSERT Societe X: X nom 'chouette'") + cnx.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'") + cnx.commit() def test_delete_relation_rql_permission(self): - self.execute("SET A concerne S WHERE A is Affaire, S is Societe") - self.commit() - with self.login('iaminusersgrouponly') as cu: + with self.admin_access.repo_cnx() as cnx: + cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") + cnx.commit() + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: # this won't actually do anything since the selection query won't return anything - cu.execute("DELETE A concerne S") - self.commit() - # to actually get Unauthorized exception, try to delete a relation we can read - eid = self.execute("INSERT Affaire X: X sujet 'pascool'")[0][0] - self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': eid}) - self.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe") - self.commit() - with self.login('iaminusersgrouponly') as cu: - self.assertRaises(Unauthorized, cu.execute, "DELETE A concerne S") - self.assertRaises(QueryError, self.commit) # can't commit anymore - self.rollback() # required after Unauthorized - cu.execute("INSERT Societe X: X nom 'chouette'") - cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'") - self.commit() - cu.execute("DELETE A concerne S WHERE S nom 'chouette'") - self.commit() + cnx.execute("DELETE A concerne S") + cnx.commit() + with self.admin_access.repo_cnx() as cnx: + # to actually get Unauthorized exception, try to delete a relation we can read + eid = cnx.execute("INSERT Affaire X: X sujet 'pascool'")[0][0] + cnx.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', + {'x': eid}) + cnx.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe") + cnx.commit() + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + self.assertRaises(Unauthorized, cnx.execute, "DELETE A concerne S") + self.assertRaises(QueryError, cnx.commit) # can't commit anymore + cnx.rollback() + cnx.execute("INSERT Societe X: X nom 'chouette'") + cnx.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'") + cnx.commit() + cnx.execute("DELETE A concerne S WHERE S nom 'chouette'") + cnx.commit() def test_user_can_change_its_upassword(self): - req = self.request() - ueid = self.create_user(req, 'user').eid - with self.login('user') as cu: - cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', + with self.admin_access.repo_cnx() as cnx: + ueid = self.create_user(cnx, 'user').eid + with self.new_access('user').repo_cnx() as cnx: + cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', {'x': ueid, 'passwd': 'newpwd'}) - self.commit() - cnx = self.login('user', password='newpwd') - cnx.close() + cnx.commit() + self.repo.close(self.repo.connect('user', password='newpwd')) def test_user_cant_change_other_upassword(self): - req = self.request() - ueid = self.create_user(req, 'otheruser').eid - with self.login('iaminusersgrouponly') as cu: - cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', + with self.admin_access.repo_cnx() as cnx: + ueid = self.create_user(cnx, 'otheruser').eid + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', {'x': ueid, 'passwd': 'newpwd'}) - self.assertRaises(Unauthorized, self.commit) + self.assertRaises(Unauthorized, cnx.commit) # read security test def test_read_base(self): with 
self.temporary_permissions(Personne={'read': ('users', 'managers')}):
-            with self.login('anon') as cu:
+            with self.new_access('anon').repo_cnx() as cnx:
                 self.assertRaises(Unauthorized,
-                                  cu.execute, 'Personne U where U nom "managers"')
-                self.rollback()
+                                  cnx.execute, 'Personne U where U nom "managers"')

     def test_read_erqlexpr_base(self):
-        eid = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
-        self.commit()
-        with self.login('iaminusersgrouponly') as cu:
-            rset = cu.execute('Affaire X')
+        with self.admin_access.repo_cnx() as cnx:
+            eid = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+            cnx.commit()
+        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            rset = cnx.execute('Affaire X')
             self.assertEqual(rset.rows, [])
-            self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
+            self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
             # cache test
-            self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
-            aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
-            soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0]
-            cu.execute("SET A concerne S WHERE A is Affaire, S is Societe")
-            self.commit()
-            rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2})
+            self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
+            aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+            soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0]
+            cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+            cnx.commit()
+            rset = cnx.execute('Any X WHERE X eid %(x)s', {'x': aff2})
             self.assertEqual(rset.rows, [[aff2]])
             # more cache test w/ NOT eid
-            rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid})
+            rset = cnx.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid})
             self.assertEqual(rset.rows, [[aff2]])
-            rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2})
+            rset = cnx.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2})
             self.assertEqual(rset.rows, [])
             # test that we can't update an attribute of an entity that can't be read
-            self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid})
-            self.rollback()
+            self.assertRaises(Unauthorized, cnx.execute,
+                              'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid})

     def test_entity_created_in_transaction(self):
         affschema = self.schema['Affaire']
         with self.temporary_permissions(Affaire={'read': affschema.permissions['add']}):
-            with self.login('iaminusersgrouponly') as cu:
-                aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+            with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+                aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
                 # entities created in transaction are readable *by eid*
-                self.assertTrue(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
+                self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
                 # XXX would be nice if it worked
-                rset = cu.execute("Affaire X WHERE X sujet 'cool'")
+                rset = cnx.execute("Affaire X WHERE X sujet 'cool'")
                 self.assertEqual(len(rset), 0)
-                self.assertRaises(Unauthorized, self.commit)
+                self.assertRaises(Unauthorized, cnx.commit)

     def test_read_erqlexpr_has_text1(self):
-        aff1 = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
-        card1 = self.execute("INSERT Card X: X title 'cool'")[0][0]
-        self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': card1})
-        self.commit()
-        with self.login('iaminusersgrouponly') as cu:
-            aff2 = 
cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0] - soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0] - cu.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1}) - self.commit() - self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x':aff1}) - self.assertTrue(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2})) - self.assertTrue(cu.execute('Any X WHERE X eid %(x)s', {'x':card1})) - rset = cu.execute("Any X WHERE X has_text 'cool'") + with self.admin_access.repo_cnx() as cnx: + aff1 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] + card1 = cnx.execute("INSERT Card X: X title 'cool'")[0][0] + cnx.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', + {'x': card1}) + cnx.commit() + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] + soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0] + cnx.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1}) + cnx.commit() + self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x':aff1}) + self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':aff2})) + self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':card1})) + rset = cnx.execute("Any X WHERE X has_text 'cool'") self.assertEqual(sorted(eid for eid, in rset.rows), [card1, aff2]) - self.rollback() def test_read_erqlexpr_has_text2(self): - self.execute("INSERT Personne X: X nom 'bidule'") - self.execute("INSERT Societe X: X nom 'bidule'") - self.commit() + with self.admin_access.repo_cnx() as cnx: + cnx.execute("INSERT Personne X: X nom 'bidule'") + cnx.execute("INSERT Societe X: X nom 'bidule'") + cnx.commit() with self.temporary_permissions(Personne={'read': ('managers',)}): - with self.login('iaminusersgrouponly') as cu: - rset = cu.execute('Any N WHERE N has_text "bidule"') + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + rset = cnx.execute('Any N WHERE N has_text "bidule"') self.assertEqual(len(rset.rows), 1, rset.rows) - rset = cu.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")') + rset = cnx.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")') self.assertEqual(len(rset.rows), 1, rset.rows) def test_read_erqlexpr_optional_rel(self): - self.execute("INSERT Personne X: X nom 'bidule'") - self.execute("INSERT Societe X: X nom 'bidule'") - self.commit() + with self.admin_access.repo_cnx() as cnx: + cnx.execute("INSERT Personne X: X nom 'bidule'") + cnx.execute("INSERT Societe X: X nom 'bidule'") + cnx.commit() with self.temporary_permissions(Personne={'read': ('managers',)}): - with self.login('anon') as cu: - rset = cu.execute('Any N,U WHERE N has_text "bidule", N owned_by U?') + with self.new_access('anon').repo_cnx() as cnx: + rset = cnx.execute('Any N,U WHERE N has_text "bidule", N owned_by U?') self.assertEqual(len(rset.rows), 1, rset.rows) def test_read_erqlexpr_aggregat(self): - self.execute("INSERT Affaire X: X sujet 'cool'")[0][0] - self.commit() - with self.login('iaminusersgrouponly') as cu: - rset = cu.execute('Any COUNT(X) WHERE X is Affaire') + with self.admin_access.repo_cnx() as cnx: + cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] + cnx.commit() + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + rset = cnx.execute('Any COUNT(X) WHERE X is Affaire') self.assertEqual(rset.rows, [[0]]) - aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0] - soc1 = cu.execute("INSERT Societe X: X 
nom 'chouette'")[0][0] - cu.execute("SET A concerne S WHERE A is Affaire, S is Societe") - self.commit() - rset = cu.execute('Any COUNT(X) WHERE X is Affaire') + aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] + soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0] + cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") + cnx.commit() + rset = cnx.execute('Any COUNT(X) WHERE X is Affaire') self.assertEqual(rset.rows, [[1]]) - rset = cu.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN') + rset = cnx.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN') values = dict(rset) self.assertEqual(values['Affaire'], 1) self.assertEqual(values['Societe'], 2) - rset = cu.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN WITH X BEING ((Affaire X) UNION (Societe X))') + rset = cnx.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN ' + 'WITH X BEING ((Affaire X) UNION (Societe X))') self.assertEqual(len(rset), 2) values = dict(rset) self.assertEqual(values['Affaire'], 1) @@ -373,80 +386,86 @@ def test_attribute_security(self): - # only managers should be able to edit the 'test' attribute of Personne entities - eid = self.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test TRUE")[0][0] - self.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) - self.commit() - with self.login('iaminusersgrouponly') as cu: - cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test TRUE") - self.assertRaises(Unauthorized, self.commit) - cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test FALSE") - self.assertRaises(Unauthorized, self.commit) - eid = cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org'")[0][0] - self.commit() - cu.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) - self.assertRaises(Unauthorized, self.commit) - cu.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid}) - self.assertRaises(Unauthorized, self.commit) - cu.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}) - self.commit() - with self.login('iaminusersgrouponly') as cu: - eid = cu.execute('INSERT Frozable F: F name "Foo"') - self.commit() - cu.execute('SET F name "Bar" WHERE F is Frozable') - self.commit() - cu.execute('SET F name "BaBar" WHERE F is Frozable') - cu.execute('SET F frozen True WHERE F is Frozable') + with self.admin_access.repo_cnx() as cnx: + # only managers should be able to edit the 'test' attribute of Personne entities + eid = cnx.execute("INSERT Personne X: X nom 'bidule', " + "X web 'http://www.debian.org', X test TRUE")[0][0] + cnx.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) + cnx.commit() + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute("INSERT Personne X: X nom 'bidule', " + "X web 'http://www.debian.org', X test TRUE") + self.assertRaises(Unauthorized, cnx.commit) + cnx.execute("INSERT Personne X: X nom 'bidule', " + "X web 'http://www.debian.org', X test FALSE") + self.assertRaises(Unauthorized, cnx.commit) + eid = cnx.execute("INSERT Personne X: X nom 'bidule', " + "X web 'http://www.debian.org'")[0][0] + cnx.commit() + cnx.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) + self.assertRaises(Unauthorized, cnx.commit) + cnx.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid}) + self.assertRaises(Unauthorized, cnx.commit) + cnx.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}) + cnx.commit() + 
with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute('INSERT Frozable F: F name "Foo"') + cnx.commit() + cnx.execute('SET F name "Bar" WHERE F is Frozable') + cnx.commit() + cnx.execute('SET F name "BaBar" WHERE F is Frozable') + cnx.execute('SET F frozen True WHERE F is Frozable') with self.assertRaises(Unauthorized): - self.commit() - self.rollback() - cu.execute('SET F frozen True WHERE F is Frozable') - self.commit() - cu.execute('SET F name "Bar" WHERE F is Frozable') + cnx.commit() + cnx.rollback() + cnx.execute('SET F frozen True WHERE F is Frozable') + cnx.commit() + cnx.execute('SET F name "Bar" WHERE F is Frozable') with self.assertRaises(Unauthorized): - self.commit() - self.rollback() + cnx.commit() def test_attribute_security_rqlexpr(self): - # Note.para attribute editable by managers or if the note is in "todo" state - note = self.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) - self.commit() - note.cw_adapt_to('IWorkflowable').fire_transition('markasdone') - self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}) - self.commit() - with self.login('iaminusersgrouponly') as cu: - cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid}) - self.assertRaises(Unauthorized, self.commit) - note2 = cu.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) - self.commit() + with self.admin_access.repo_cnx() as cnx: + # Note.para attribute editable by managers or if the note is in "todo" state + note = cnx.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) + cnx.commit() + note.cw_adapt_to('IWorkflowable').fire_transition('markasdone') + cnx.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}) + cnx.commit() + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid}) + self.assertRaises(Unauthorized, cnx.commit) + note2 = cnx.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) + cnx.commit() note2.cw_adapt_to('IWorkflowable').fire_transition('markasdone') - self.commit() - self.assertEqual(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid})), + cnx.commit() + self.assertEqual(len(cnx.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', + {'x': note2.eid})), 0) - cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) - self.assertRaises(Unauthorized, self.commit) + cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) + self.assertRaises(Unauthorized, cnx.commit) note2.cw_adapt_to('IWorkflowable').fire_transition('redoit') - self.commit() - cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) - self.commit() - cu.execute("INSERT Note X: X something 'A'") - self.assertRaises(Unauthorized, self.commit) - cu.execute("INSERT Note X: X para 'zogzog', X something 'A'") - self.commit() - note = cu.execute("INSERT Note X").get_entity(0,0) - self.commit() + cnx.commit() + cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) + cnx.commit() + cnx.execute("INSERT Note X: X something 'A'") + self.assertRaises(Unauthorized, cnx.commit) + cnx.execute("INSERT Note X: X para 'zogzog', X something 'A'") + cnx.commit() + note = cnx.execute("INSERT Note X").get_entity(0,0) + cnx.commit() note.cw_set(something=u'B') - self.commit() + cnx.commit() note.cw_set(something=None, para=u'zogzog') - self.commit() + cnx.commit() def test_attribute_read_security(self): # anon not allowed to see users'login, but they can see 
users login_rdef = self.repo.schema['CWUser'].rdef('login') with self.temporary_permissions((login_rdef, {'read': ('users', 'managers')}), CWUser={'read': ('guests', 'users', 'managers')}): - with self.login('anon') as cu: - rset = cu.execute('CWUser X') + with self.new_access('anon').repo_cnx() as cnx: + rset = cnx.execute('CWUser X') self.assertTrue(rset) x = rset.get_entity(0, 0) self.assertEqual(x.login, None) @@ -457,17 +476,19 @@ self.assertTrue(x.creation_date) def test_yams_inheritance_and_security_bug(self): - with self.temporary_permissions(Division={'read': ('managers', ERQLExpression('X owned_by U'))}): - with self.login('iaminusersgrouponly'): - querier = self.repo.querier + with self.temporary_permissions(Division={'read': ('managers', + ERQLExpression('X owned_by U'))}): + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + querier = cnx.repo.querier rqlst = querier.parse('Any X WHERE X is_instance_of Societe') - querier.solutions(self.session, rqlst, {}) + querier.solutions(cnx, rqlst, {}) querier._annotate(rqlst) - plan = querier.plan_factory(rqlst, {}, self.session) + plan = querier.plan_factory(rqlst, {}, cnx) plan.preprocess(rqlst) self.assertEqual( rqlst.as_string(), - '(Any X WHERE X is IN(SubDivision, Societe)) UNION (Any X WHERE X is Division, EXISTS(X owned_by %(B)s))') + '(Any X WHERE X is IN(SubDivision, Societe)) UNION ' + '(Any X WHERE X is Division, EXISTS(X owned_by %(B)s))') class BaseSchemaSecurityTC(BaseSecurityTC): @@ -475,159 +496,155 @@ def test_user_can_delete_object_he_created(self): # even if some other user have changed object'state - with self.login('iaminusersgrouponly') as cu: + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: # due to security test, affaire has to concerne a societe the user owns - cu.execute('INSERT Societe X: X nom "ARCTIA"') - cu.execute('INSERT Affaire X: X ref "ARCT01", X concerne S WHERE S nom "ARCTIA"') - self.commit() - affaire = self.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) - affaire.cw_adapt_to('IWorkflowable').fire_transition('abort') - self.commit() - self.assertEqual(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01"')), - 1) - self.assertEqual(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",' - 'X owned_by U, U login "admin"')), - 1) # TrInfo at the above state change - with self.login('iaminusersgrouponly') as cu: - cu.execute('DELETE Affaire X WHERE X ref "ARCT01"') - self.commit() - self.assertFalse(cu.execute('Affaire X')) + cnx.execute('INSERT Societe X: X nom "ARCTIA"') + cnx.execute('INSERT Affaire X: X ref "ARCT01", X concerne S WHERE S nom "ARCTIA"') + cnx.commit() + with self.admin_access.repo_cnx() as cnx: + affaire = cnx.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) + affaire.cw_adapt_to('IWorkflowable').fire_transition('abort') + cnx.commit() + self.assertEqual(len(cnx.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01"')), + 1) + self.assertEqual(len(cnx.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",' + 'X owned_by U, U login "admin"')), + 1) # TrInfo at the above state change + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + cnx.execute('DELETE Affaire X WHERE X ref "ARCT01"') + cnx.commit() + self.assertFalse(cnx.execute('Affaire X')) def test_users_and_groups_non_readable_by_guests(self): - with self.login('anon') as cu: - anon = cu.connection.user(self.session) + with self.repo.internal_cnx() as cnx: + admineid = cnx.execute('CWUser U WHERE U login "admin"').rows[0][0] + with 
self.new_access('anon').repo_cnx() as cnx: + anon = cnx.user # anonymous user can only read itself - rset = cu.execute('Any L WHERE X owned_by U, U login L') + rset = cnx.execute('Any L WHERE X owned_by U, U login L') self.assertEqual([['anon']], rset.rows) - rset = cu.execute('CWUser X') + rset = cnx.execute('CWUser X') self.assertEqual([[anon.eid]], rset.rows) # anonymous user can read groups (necessary to check allowed transitions for instance) - self.assert_(cu.execute('CWGroup X')) + self.assert_(cnx.execute('CWGroup X')) # should only be able to read the anonymous user, not another one - origuser = self.adminsession.user self.assertRaises(Unauthorized, - cu.execute, 'CWUser X WHERE X eid %(x)s', {'x': origuser.eid}) - # nothing selected, nothing updated, no exception raised - #self.assertRaises(Unauthorized, - # cu.execute, 'SET X login "toto" WHERE X eid %(x)s', - # {'x': self.user.eid}) - - rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}) + cnx.execute, 'CWUser X WHERE X eid %(x)s', {'x': admineid}) + rset = cnx.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}) self.assertEqual([[anon.eid]], rset.rows) # but can't modify it - cu.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid}) - self.assertRaises(Unauthorized, self.commit) + cnx.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid}) + self.assertRaises(Unauthorized, cnx.commit) def test_in_group_relation(self): - with self.login('iaminusersgrouponly') as cu: + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: rql = u"DELETE U in_group G WHERE U login 'admin'" - self.assertRaises(Unauthorized, cu.execute, rql) + self.assertRaises(Unauthorized, cnx.execute, rql) rql = u"SET U in_group G WHERE U login 'admin', G name 'users'" - self.assertRaises(Unauthorized, cu.execute, rql) - self.rollback() + self.assertRaises(Unauthorized, cnx.execute, rql) def test_owned_by(self): - self.execute("INSERT Personne X: X nom 'bidule'") - self.commit() - with self.login('iaminusersgrouponly') as cu: + with self.admin_access.repo_cnx() as cnx: + cnx.execute("INSERT Personne X: X nom 'bidule'") + cnx.commit() + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: rql = u"SET X owned_by U WHERE U login 'iaminusersgrouponly', X is Personne" - self.assertRaises(Unauthorized, cu.execute, rql) - self.rollback() + self.assertRaises(Unauthorized, cnx.execute, rql) def test_bookmarked_by_guests_security(self): - beid1 = self.execute('INSERT Bookmark B: B path "?vid=manage", B title "manage"')[0][0] - beid2 = self.execute('INSERT Bookmark B: B path "?vid=index", B title "index", B bookmarked_by U WHERE U login "anon"')[0][0] - self.commit() - with self.login('anon') as cu: - anoneid = self.session.user.eid - self.assertEqual(cu.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' + with self.admin_access.repo_cnx() as cnx: + beid1 = cnx.execute('INSERT Bookmark B: B path "?vid=manage", B title "manage"')[0][0] + beid2 = cnx.execute('INSERT Bookmark B: B path "?vid=index", B title "index", ' + 'B bookmarked_by U WHERE U login "anon"')[0][0] + cnx.commit() + with self.new_access('anon').repo_cnx() as cnx: + anoneid = cnx.user.eid + self.assertEqual(cnx.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' 'B bookmarked_by U, U eid %s' % anoneid).rows, [['index', '?vid=index']]) - self.assertEqual(cu.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' + self.assertEqual(cnx.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B 
title T,B path P,' 'B bookmarked_by U, U eid %(x)s', {'x': anoneid}).rows, [['index', '?vid=index']]) # can read others bookmarks as well - self.assertEqual(cu.execute('Any B where B is Bookmark, NOT B bookmarked_by U').rows, + self.assertEqual(cnx.execute('Any B where B is Bookmark, NOT B bookmarked_by U').rows, [[beid1]]) - self.assertRaises(Unauthorized, cu.execute,'DELETE B bookmarked_by U') + self.assertRaises(Unauthorized, cnx.execute,'DELETE B bookmarked_by U') self.assertRaises(Unauthorized, - cu.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s', + cnx.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s', {'x': anoneid, 'b': beid1}) - self.rollback() def test_ambigous_ordered(self): - with self.login('anon') as cu: - names = [t for t, in cu.execute('Any N ORDERBY lower(N) WHERE X name N')] + with self.new_access('anon').repo_cnx() as cnx: + names = [t for t, in cnx.execute('Any N ORDERBY lower(N) WHERE X name N')] self.assertEqual(names, sorted(names, key=lambda x: x.lower())) def test_in_state_without_update_perm(self): """check a user change in_state without having update permission on the subject """ - eid = self.execute('INSERT Affaire X: X ref "ARCT01"')[0][0] - self.commit() - with self.login('iaminusersgrouponly') as cu: - session = self.session - # needed to avoid check_perm error - session.set_cnxset() + with self.admin_access.repo_cnx() as cnx: + eid = cnx.execute('INSERT Affaire X: X ref "ARCT01"')[0][0] + cnx.commit() + with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: # needed to remove rql expr granting update perm to the user affschema = self.schema['Affaire'] with self.temporary_permissions(Affaire={'update': affschema.get_groups('update'), 'read': ('users',)}): self.assertRaises(Unauthorized, - affschema.check_perm, session, 'update', eid=eid) - aff = cu.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) + affschema.check_perm, cnx, 'update', eid=eid) + aff = cnx.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) aff.cw_adapt_to('IWorkflowable').fire_transition('abort') - self.commit() + cnx.commit() # though changing a user state (even logged user) is reserved to managers - user = self.user(session) - session.set_cnxset() + user = cnx.user # XXX wether it should raise Unauthorized or ValidationError is not clear # the best would probably ValidationError if the transition doesn't exist # from the current state but Unauthorized if it exists but user can't pass it self.assertRaises(ValidationError, user.cw_adapt_to('IWorkflowable').fire_transition, 'deactivate') - self.rollback() # else will fail on login cm exit def test_trinfo_security(self): - aff = self.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0) - iworkflowable = aff.cw_adapt_to('IWorkflowable') - self.commit() - iworkflowable.fire_transition('abort') - self.commit() - # can change tr info comment - self.execute('SET TI comment %(c)s WHERE TI wf_info_for X, X ref "ARCT01"', - {'c': u'bouh!'}) - self.commit() - aff.cw_clear_relation_cache('wf_info_for', 'object') - trinfo = iworkflowable.latest_trinfo() - self.assertEqual(trinfo.comment, 'bouh!') - # but not from_state/to_state - aff.cw_clear_relation_cache('wf_info_for', role='object') - self.assertRaises(Unauthorized, - self.execute, 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"', - {'ti': trinfo.eid}) - self.assertRaises(Unauthorized, - self.execute, 'SET TI to_state S WHERE TI eid %(ti)s, S name "pitetre"', - {'ti': trinfo.eid}) + with self.admin_access.repo_cnx() as cnx: + aff = 
cnx.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0) + iworkflowable = aff.cw_adapt_to('IWorkflowable') + cnx.commit() + iworkflowable.fire_transition('abort') + cnx.commit() + # can change tr info comment + cnx.execute('SET TI comment %(c)s WHERE TI wf_info_for X, X ref "ARCT01"', + {'c': u'bouh!'}) + cnx.commit() + aff.cw_clear_relation_cache('wf_info_for', 'object') + trinfo = iworkflowable.latest_trinfo() + self.assertEqual(trinfo.comment, 'bouh!') + # but not from_state/to_state + aff.cw_clear_relation_cache('wf_info_for', role='object') + self.assertRaises(Unauthorized, cnx.execute, + 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"', + {'ti': trinfo.eid}) + self.assertRaises(Unauthorized, cnx.execute, + 'SET TI to_state S WHERE TI eid %(ti)s, S name "pitetre"', + {'ti': trinfo.eid}) def test_emailaddress_security(self): # check for prexisting email adresse - if self.execute('Any X WHERE X is EmailAddress'): - rset = self.execute('Any X, U WHERE X is EmailAddress, U use_email X') - msg = ['Preexisting email readable by anon found!'] - tmpl = ' - "%s" used by user "%s"' - for i in xrange(len(rset)): - email, user = rset.get_entity(i, 0), rset.get_entity(i, 1) - msg.append(tmpl % (email.dc_title(), user.dc_title())) - raise RuntimeError('\n'.join(msg)) - # actual test - self.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) - self.execute('INSERT EmailAddress X: X address "anon", U use_email X WHERE U login "anon"').get_entity(0, 0) - self.commit() - self.assertEqual(len(self.execute('Any X WHERE X is EmailAddress')), 2) - self.login('anon') - self.assertEqual(len(self.execute('Any X WHERE X is EmailAddress')), 1) + with self.admin_access.repo_cnx() as cnx: + if cnx.execute('Any X WHERE X is EmailAddress'): + rset = cnx.execute('Any X, U WHERE X is EmailAddress, U use_email X') + msg = ['Preexisting email readable by anon found!'] + tmpl = ' - "%s" used by user "%s"' + for i in xrange(len(rset)): + email, user = rset.get_entity(i, 0), rset.get_entity(i, 1) + msg.append(tmpl % (email.dc_title(), user.dc_title())) + raise RuntimeError('\n'.join(msg)) + # actual test + cnx.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) + cnx.execute('INSERT EmailAddress X: X address "anon", ' + 'U use_email X WHERE U login "anon"').get_entity(0, 0) + cnx.commit() + self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 2) + with self.new_access('anon').repo_cnx() as cnx: + self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 1) if __name__ == '__main__': unittest_main() diff -r 84738d495ffd -r 793377697c81 server/test/unittest_session.py --- a/server/test/unittest_session.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/unittest_session.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
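The rewrite running through these test modules replaces the old implicit per-test session
(self.execute / self.login / self.commit) with explicit connection context managers. A minimal
sketch of the new style, assuming a CubicWebTC subclass and an existing 'someuser' account
(both names are illustrative, not taken from this patch):

    from cubicweb.devtools.testlib import CubicWebTC

    class ConnectionStyleTC(CubicWebTC):
        def test_sketch(self):
            # privileged connection to the repository, closed on block exit
            with self.admin_access.repo_cnx() as cnx:
                eid = cnx.execute('INSERT Societe X: X nom "ACME"')[0][0]
                cnx.commit()
            # separate connection authenticated as a plain user
            with self.new_access('someuser').repo_cnx() as cnx:
                rset = cnx.execute('Any X WHERE X eid %(x)s', {'x': eid})

Each block owns its transaction: commit() and rollback() are called on the connection itself,
and (as test_connection_exit below checks) leaving the block without committing rolls any
pending changes back.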
@@ -18,6 +18,8 @@
 
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.server.session import HOOKS_ALLOW_ALL, HOOKS_DENY_ALL
+from cubicweb.server import hook
+from cubicweb.predicates import is_instance
 
 class InternalSessionTC(CubicWebTC):
     def test_dbapi_query(self):
@@ -39,10 +41,16 @@
 
     def test_hooks_control(self):
         session = self.session
+        # this test checks the "old" behavior of sessions with automatic connection management
+        # close the default cnx, we do not want it to interfere with the test
+        self.cnx.close()
+        # open a dedicated one
+        session.set_cnx('Some-random-cnx-unrelated-to-the-default-one')
+        # go test go
         self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode)
         self.assertEqual(set(), session.disabled_hook_categories)
         self.assertEqual(set(), session.enabled_hook_categories)
-        self.assertEqual(1, len(session._txs))
+        self.assertEqual(1, len(session._cnxs))
         with session.deny_all_hooks_but('metadata'):
             self.assertEqual(HOOKS_DENY_ALL, session.hooks_mode)
             self.assertEqual(set(), session.disabled_hook_categories)
@@ -64,12 +72,52 @@
             self.assertEqual(set(('metadata',)), session.enabled_hook_categories)
         # leaving context manager with no transaction running should reset the
         # transaction local storage (and associated cnxset)
-        self.assertEqual({}, session._txs)
+        self.assertEqual({}, session._cnxs)
         self.assertEqual(None, session.cnxset)
         self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode, session.HOOKS_ALLOW_ALL)
         self.assertEqual(set(), session.disabled_hook_categories)
         self.assertEqual(set(), session.enabled_hook_categories)
 
+    def test_explicit_connection(self):
+        with self.session.new_cnx() as cnx:
+            rset = cnx.execute('Any X LIMIT 1 WHERE X is CWUser')
+            self.assertEqual(1, len(rset))
+            user = rset.get_entity(0, 0)
+            user.cw_delete()
+            cnx.rollback()
+            new_user = cnx.entity_from_eid(user.eid)
+            self.assertIsNotNone(new_user.login)
+        self.assertFalse(cnx._open)
+
+    def test_internal_cnx(self):
+        with self.repo.internal_cnx() as cnx:
+            rset = cnx.execute('Any X LIMIT 1 WHERE X is CWUser')
+            self.assertEqual(1, len(rset))
+            user = rset.get_entity(0, 0)
+            user.cw_delete()
+            cnx.rollback()
+            new_user = cnx.entity_from_eid(user.eid)
+            self.assertIsNotNone(new_user.login)
+        self.assertFalse(cnx._open)
+
+    def test_connection_exit(self):
+        """exiting a connection should roll back the transaction, including any
+        pending operations"""
+        self.rollbacked = False
+        class RollbackOp(hook.Operation):
+            _test = self
+            def rollback_event(self):
+                self._test.rollbacked = True
+        class RollbackHook(hook.Hook):
+            __regid__ = 'rollback'
+            events = ('after_update_entity',)
+            __select__ = hook.Hook.__select__ & is_instance('CWGroup')
+            def __call__(self):
+                RollbackOp(self._cw)
+
+        with self.temporary_appobjects(RollbackHook):
+            with self.admin_access.client_cnx() as cnx:
+                cnx.execute('SET G name "foo" WHERE G is CWGroup, G name "managers"')
+        self.assertTrue(self.rollbacked)
 
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
diff -r 84738d495ffd -r 793377697c81 server/test/unittest_sqlutils.py
--- a/server/test/unittest_sqlutils.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/server/test/unittest_sqlutils.py	Wed Sep 24 18:04:30 2014 +0200
@@ -51,18 +51,18 @@
 class SQLUtilsTC(CubicWebTC):
 
     def test_group_concat(self):
-        req = self.request()
-        g = req.create_entity('CWGroup', name=u'héhé')
-        u = req.create_entity('CWUser', login=u'toto', upassword=u'',
-                              in_group=g.eid)
-        rset = self.execute(u'Any L,GROUP_CONCAT(G) GROUPBY L WHERE X login L,'
-                            u'X in_group G, G name GN, NOT G name
IN ("users", "héhé")') - self.assertEqual([[u'admin', u'3'], [u'anon', u'2']], - rset.rows) - rset = self.execute('Any L,GROUP_CONCAT(GN) GROUPBY L WHERE X login L,' - 'X in_group G, G name GN, NOT G name "users"') - self.assertEqual([[u'admin', u'managers'], [u'anon', u'guests'], [u'toto', u'héhé']], - rset.rows) + with self.admin_access.repo_cnx() as cnx: + g = cnx.create_entity('CWGroup', name=u'héhé') + u = cnx.create_entity('CWUser', login=u'toto', upassword=u'', + in_group=g.eid) + rset = cnx.execute(u'Any L,GROUP_CONCAT(G) GROUPBY L WHERE X login L,' + u'X in_group G, G name GN, NOT G name IN ("users", "héhé")') + self.assertEqual([[u'admin', u'3'], [u'anon', u'2']], + rset.rows) + rset = cnx.execute('Any L,GROUP_CONCAT(GN) GROUPBY L WHERE X login L,' + 'X in_group G, G name GN, NOT G name "users"') + self.assertEqual([[u'admin', u'managers'], [u'anon', u'guests'], [u'toto', u'héhé']], + rset.rows) if __name__ == '__main__': unittest_main() diff -r 84738d495ffd -r 793377697c81 server/test/unittest_ssplanner.py --- a/server/test/unittest_ssplanner.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/unittest_ssplanner.py Wed Sep 24 18:04:30 2014 +0200 @@ -51,8 +51,7 @@ [{'X': 'Basket', 'XN': 'String'}, {'X': 'State', 'XN': 'String'}, {'X': 'Folder', 'XN': 'String'}])], - None, None, - [self.system], None, [])]) + None, [])]) def test_groupeded_ambigous_sol(self): self._test('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN (Basket, State, Folder)', @@ -60,8 +59,7 @@ [{'X': 'Basket', 'XN': 'String'}, {'X': 'State', 'XN': 'String'}, {'X': 'Folder', 'XN': 'String'}])], - None, None, - [self.system], None, [])]) + None, [])]) if __name__ == '__main__': from logilab.common.testlib import unittest_main diff -r 84738d495ffd -r 793377697c81 server/test/unittest_storage.py --- a/server/test/unittest_storage.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/unittest_storage.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
@@ -28,7 +28,7 @@ from cubicweb import Binary, QueryError from cubicweb.predicates import is_instance from cubicweb.server.sources import storages -from cubicweb.server.hook import Hook, Operation +from cubicweb.server.hook import Hook class DummyBeforeHook(Hook): __regid__ = 'dummy-before-hook' @@ -50,7 +50,7 @@ assert oldvalue == self.entity.data.getvalue() class StorageTC(CubicWebTC): - + tempdir = None tags = CubicWebTC.tags | Tags('Storage', 'BFSS') def setup_database(self): @@ -65,255 +65,273 @@ shutil.rmtree(self.tempdir) - def create_file(self, content='the-data'): - req = self.request() - return req.create_entity('File', data=Binary(content), - data_format=u'text/plain', data_name=u'foo.pdf') + def create_file(self, cnx, content='the-data'): + return cnx.create_entity('File', data=Binary(content), + data_format=u'text/plain', + data_name=u'foo.pdf') - def fspath(self, entity): - fspath = self.execute('Any fspath(D) WHERE F eid %(f)s, F data D', - {'f': entity.eid})[0][0] + def fspath(self, cnx, entity): + fspath = cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D', + {'f': entity.eid})[0][0] return fspath.getvalue() def test_bfss_wrong_fspath_usage(self): - f1 = self.create_file() - self.execute('Any fspath(D) WHERE F eid %(f)s, F data D', {'f': f1.eid}) - with self.assertRaises(NotImplementedError) as cm: - self.execute('Any fspath(F) WHERE F eid %(f)s', {'f': f1.eid}) - self.assertEqual(str(cm.exception), - 'This callback is only available for BytesFileSystemStorage ' - 'managed attribute. Is FSPATH() argument BFSS managed?') + with self.admin_access.repo_cnx() as cnx: + f1 = self.create_file(cnx) + cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D', {'f': f1.eid}) + with self.assertRaises(NotImplementedError) as cm: + cnx.execute('Any fspath(F) WHERE F eid %(f)s', {'f': f1.eid}) + self.assertEqual(str(cm.exception), + 'This callback is only available for BytesFileSystemStorage ' + 'managed attribute. 
Is FSPATH() argument BFSS managed?') def test_bfss_storage(self): - f1 = self.create_file() - expected_filepath = osp.join(self.tempdir, '%s_data_%s' % - (f1.eid, f1.data_name)) - self.assertTrue(osp.isfile(expected_filepath)) - # file should be read only - self.assertFalse(os.access(expected_filepath, os.W_OK)) - self.assertEqual(file(expected_filepath).read(), 'the-data') - self.rollback() - self.assertFalse(osp.isfile(expected_filepath)) - f1 = self.create_file() - self.commit() - self.assertEqual(file(expected_filepath).read(), 'the-data') - f1.cw_set(data=Binary('the new data')) - self.rollback() - self.assertEqual(file(expected_filepath).read(), 'the-data') - f1.cw_delete() - self.assertTrue(osp.isfile(expected_filepath)) - self.rollback() - self.assertTrue(osp.isfile(expected_filepath)) - f1.cw_delete() - self.commit() - self.assertFalse(osp.isfile(expected_filepath)) + with self.admin_access.repo_cnx() as cnx: + f1 = self.create_file(cnx) + expected_filepath = osp.join(self.tempdir, '%s_data_%s' % + (f1.eid, f1.data_name)) + self.assertTrue(osp.isfile(expected_filepath)) + # file should be read only + self.assertFalse(os.access(expected_filepath, os.W_OK)) + self.assertEqual(file(expected_filepath).read(), 'the-data') + cnx.rollback() + self.assertFalse(osp.isfile(expected_filepath)) + f1 = self.create_file(cnx) + cnx.commit() + self.assertEqual(file(expected_filepath).read(), 'the-data') + f1.cw_set(data=Binary('the new data')) + cnx.rollback() + self.assertEqual(file(expected_filepath).read(), 'the-data') + f1.cw_delete() + self.assertTrue(osp.isfile(expected_filepath)) + cnx.rollback() + self.assertTrue(osp.isfile(expected_filepath)) + f1.cw_delete() + cnx.commit() + self.assertFalse(osp.isfile(expected_filepath)) def test_bfss_sqlite_fspath(self): - f1 = self.create_file() - expected_filepath = osp.join(self.tempdir, '%s_data_%s' % (f1.eid, f1.data_name)) - self.assertEqual(self.fspath(f1), expected_filepath) + with self.admin_access.repo_cnx() as cnx: + f1 = self.create_file(cnx) + expected_filepath = osp.join(self.tempdir, '%s_data_%s' % (f1.eid, f1.data_name)) + self.assertEqual(self.fspath(cnx, f1), expected_filepath) def test_bfss_fs_importing_doesnt_touch_path(self): - self.session.transaction_data['fs_importing'] = True - filepath = osp.abspath(__file__) - f1 = self.request().create_entity('File', data=Binary(filepath), - data_format=u'text/plain', data_name=u'foo') - self.assertEqual(self.fspath(f1), filepath) + with self.admin_access.repo_cnx() as cnx: + cnx.transaction_data['fs_importing'] = True + filepath = osp.abspath(__file__) + f1 = cnx.create_entity('File', data=Binary(filepath), + data_format=u'text/plain', data_name=u'foo') + self.assertEqual(self.fspath(cnx, f1), filepath) def test_source_storage_transparency(self): - with self.temporary_appobjects(DummyBeforeHook, DummyAfterHook): - self.create_file() + with self.admin_access.repo_cnx() as cnx: + with self.temporary_appobjects(DummyBeforeHook, DummyAfterHook): + self.create_file(cnx) def test_source_mapped_attribute_error_cases(self): - with self.assertRaises(QueryError) as cm: - self.execute('Any X WHERE X data ~= "hop", X is File') - self.assertEqual(str(cm.exception), 'can\'t use File.data (X data ILIKE "hop") in restriction') - with self.assertRaises(QueryError) as cm: - self.execute('Any X, Y WHERE X data D, Y data D, ' - 'NOT X identity Y, X is File, Y is File') - self.assertEqual(str(cm.exception), "can't use D as a restriction variable") - # query returning mix of mapped / regular attributes (only 
file.data - # mapped, not image.data for instance) - with self.assertRaises(QueryError) as cm: - self.execute('Any X WITH X BEING (' - ' (Any NULL)' - ' UNION ' - ' (Any D WHERE X data D, X is File)' - ')') - self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') - with self.assertRaises(QueryError) as cm: - self.execute('(Any D WHERE X data D, X is File)' - ' UNION ' - '(Any D WHERE X title D, X is Bookmark)') - self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') + with self.admin_access.repo_cnx() as cnx: + with self.assertRaises(QueryError) as cm: + cnx.execute('Any X WHERE X data ~= "hop", X is File') + self.assertEqual(str(cm.exception), 'can\'t use File.data (X data ILIKE "hop") in restriction') + with self.assertRaises(QueryError) as cm: + cnx.execute('Any X, Y WHERE X data D, Y data D, ' + 'NOT X identity Y, X is File, Y is File') + self.assertEqual(str(cm.exception), "can't use D as a restriction variable") + # query returning mix of mapped / regular attributes (only file.data + # mapped, not image.data for instance) + with self.assertRaises(QueryError) as cm: + cnx.execute('Any X WITH X BEING (' + ' (Any NULL)' + ' UNION ' + ' (Any D WHERE X data D, X is File)' + ')') + self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') + with self.assertRaises(QueryError) as cm: + cnx.execute('(Any D WHERE X data D, X is File)' + ' UNION ' + '(Any D WHERE X title D, X is Bookmark)') + self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') - storages.set_attribute_storage(self.repo, 'State', 'name', - storages.BytesFileSystemStorage(self.tempdir)) - try: - with self.assertRaises(QueryError) as cm: - self.execute('Any D WHERE X name D, X is IN (State, Transition)') - self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') - finally: - storages.unset_attribute_storage(self.repo, 'State', 'name') + storages.set_attribute_storage(self.repo, 'State', 'name', + storages.BytesFileSystemStorage(self.tempdir)) + try: + with self.assertRaises(QueryError) as cm: + cnx.execute('Any D WHERE X name D, X is IN (State, Transition)') + self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not') + finally: + storages.unset_attribute_storage(self.repo, 'State', 'name') def test_source_mapped_attribute_advanced(self): - f1 = self.create_file() - rset = self.execute('Any X,D WITH D,X BEING (' - ' (Any D, X WHERE X eid %(x)s, X data D)' - ' UNION ' - ' (Any D, X WHERE X eid %(x)s, X data D)' - ')', {'x': f1.eid}) - self.assertEqual(len(rset), 2) - self.assertEqual(rset[0][0], f1.eid) - self.assertEqual(rset[1][0], f1.eid) - self.assertEqual(rset[0][1].getvalue(), 'the-data') - self.assertEqual(rset[1][1].getvalue(), 'the-data') - rset = self.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D', - {'x': f1.eid}) - self.assertEqual(len(rset), 1) - self.assertEqual(rset[0][0], f1.eid) - self.assertEqual(rset[0][1], len('the-data')) - rset = self.execute('Any X,LENGTH(D) WITH D,X BEING (' - ' (Any D, X WHERE X eid %(x)s, X data D)' - ' UNION ' - ' (Any D, X WHERE X eid %(x)s, X data D)' - ')', {'x': f1.eid}) - self.assertEqual(len(rset), 2) - self.assertEqual(rset[0][0], f1.eid) - self.assertEqual(rset[1][0], f1.eid) - self.assertEqual(rset[0][1], len('the-data')) - self.assertEqual(rset[1][1], len('the-data')) - with self.assertRaises(QueryError) as cm: - self.execute('Any X,UPPER(D) WHERE X eid %(x)s, X data D', 
- {'x': f1.eid}) - self.assertEqual(str(cm.exception), 'UPPER can not be called on mapped attribute') + with self.admin_access.repo_cnx() as cnx: + f1 = self.create_file(cnx) + rset = cnx.execute('Any X,D WITH D,X BEING (' + ' (Any D, X WHERE X eid %(x)s, X data D)' + ' UNION ' + ' (Any D, X WHERE X eid %(x)s, X data D)' + ')', {'x': f1.eid}) + self.assertEqual(len(rset), 2) + self.assertEqual(rset[0][0], f1.eid) + self.assertEqual(rset[1][0], f1.eid) + self.assertEqual(rset[0][1].getvalue(), 'the-data') + self.assertEqual(rset[1][1].getvalue(), 'the-data') + rset = cnx.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D', + {'x': f1.eid}) + self.assertEqual(len(rset), 1) + self.assertEqual(rset[0][0], f1.eid) + self.assertEqual(rset[0][1], len('the-data')) + rset = cnx.execute('Any X,LENGTH(D) WITH D,X BEING (' + ' (Any D, X WHERE X eid %(x)s, X data D)' + ' UNION ' + ' (Any D, X WHERE X eid %(x)s, X data D)' + ')', {'x': f1.eid}) + self.assertEqual(len(rset), 2) + self.assertEqual(rset[0][0], f1.eid) + self.assertEqual(rset[1][0], f1.eid) + self.assertEqual(rset[0][1], len('the-data')) + self.assertEqual(rset[1][1], len('the-data')) + with self.assertRaises(QueryError) as cm: + cnx.execute('Any X,UPPER(D) WHERE X eid %(x)s, X data D', + {'x': f1.eid}) + self.assertEqual(str(cm.exception), 'UPPER can not be called on mapped attribute') def test_bfss_fs_importing_transparency(self): - self.session.transaction_data['fs_importing'] = True - filepath = osp.abspath(__file__) - f1 = self.session.create_entity('File', data=Binary(filepath), - data_format=u'text/plain', data_name=u'foo') - cw_value = f1.data.getvalue() - fs_value = file(filepath).read() - if cw_value != fs_value: - self.fail('cw value %r is different from file content' % cw_value) - + with self.admin_access.repo_cnx() as cnx: + cnx.transaction_data['fs_importing'] = True + filepath = osp.abspath(__file__) + f1 = cnx.create_entity('File', data=Binary(filepath), + data_format=u'text/plain', data_name=u'foo') + cw_value = f1.data.getvalue() + fs_value = file(filepath).read() + if cw_value != fs_value: + self.fail('cw value %r is different from file content' % cw_value) @tag('update') def test_bfss_update_with_existing_data(self): - # use self.session to use server-side cache - f1 = self.session.create_entity('File', data=Binary('some data'), - data_format=u'text/plain', data_name=u'foo') - # NOTE: do not use cw_set() which would automatically - # update f1's local dict. We want the pure rql version to work - self.execute('SET F data %(d)s WHERE F eid %(f)s', - {'d': Binary('some other data'), 'f': f1.eid}) - self.assertEqual(f1.data.getvalue(), 'some other data') - self.commit() - f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) - self.assertEqual(f2.data.getvalue(), 'some other data') + with self.admin_access.repo_cnx() as cnx: + f1 = cnx.create_entity('File', data=Binary('some data'), + data_format=u'text/plain', data_name=u'foo') + # NOTE: do not use cw_set() which would automatically + # update f1's local dict. 
We want the pure rql version to work + cnx.execute('SET F data %(d)s WHERE F eid %(f)s', + {'d': Binary('some other data'), 'f': f1.eid}) + self.assertEqual(f1.data.getvalue(), 'some other data') + cnx.commit() + f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) + self.assertEqual(f2.data.getvalue(), 'some other data') @tag('update', 'extension', 'commit') def test_bfss_update_with_different_extension_commited(self): - # use self.session to use server-side cache - f1 = self.session.create_entity('File', data=Binary('some data'), - data_format=u'text/plain', data_name=u'foo.txt') - # NOTE: do not use cw_set() which would automatically - # update f1's local dict. We want the pure rql version to work - self.commit() - old_path = self.fspath(f1) - self.assertTrue(osp.isfile(old_path)) - self.assertEqual(osp.splitext(old_path)[1], '.txt') - self.execute('SET F data %(d)s, F data_name %(dn)s, F data_format %(df)s WHERE F eid %(f)s', - {'d': Binary('some other data'), 'f': f1.eid, 'dn': u'bar.jpg', 'df': u'image/jpeg'}) - self.commit() - # the new file exists with correct extension - # the old file is dead - f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) - new_path = self.fspath(f2) - self.assertFalse(osp.isfile(old_path)) - self.assertTrue(osp.isfile(new_path)) - self.assertEqual(osp.splitext(new_path)[1], '.jpg') + with self.admin_access.repo_cnx() as cnx: + f1 = cnx.create_entity('File', data=Binary('some data'), + data_format=u'text/plain', data_name=u'foo.txt') + # NOTE: do not use cw_set() which would automatically + # update f1's local dict. We want the pure rql version to work + cnx.commit() + old_path = self.fspath(cnx, f1) + self.assertTrue(osp.isfile(old_path)) + self.assertEqual(osp.splitext(old_path)[1], '.txt') + cnx.execute('SET F data %(d)s, F data_name %(dn)s, ' + 'F data_format %(df)s WHERE F eid %(f)s', + {'d': Binary('some other data'), 'f': f1.eid, + 'dn': u'bar.jpg', 'df': u'image/jpeg'}) + cnx.commit() + # the new file exists with correct extension + # the old file is dead + f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) + new_path = self.fspath(cnx, f2) + self.assertFalse(osp.isfile(old_path)) + self.assertTrue(osp.isfile(new_path)) + self.assertEqual(osp.splitext(new_path)[1], '.jpg') @tag('update', 'extension', 'rollback') def test_bfss_update_with_different_extension_rolled_back(self): - # use self.session to use server-side cache - f1 = self.session.create_entity('File', data=Binary('some data'), - data_format=u'text/plain', data_name=u'foo.txt') - # NOTE: do not use cw_set() which would automatically - # update f1's local dict. 
We want the pure rql version to work - self.commit() - old_path = self.fspath(f1) - old_data = f1.data.getvalue() - self.assertTrue(osp.isfile(old_path)) - self.assertEqual(osp.splitext(old_path)[1], '.txt') - self.execute('SET F data %(d)s, F data_name %(dn)s, F data_format %(df)s WHERE F eid %(f)s', - {'d': Binary('some other data'), 'f': f1.eid, 'dn': u'bar.jpg', 'df': u'image/jpeg'}) - self.rollback() - # the new file exists with correct extension - # the old file is dead - f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) - new_path = self.fspath(f2) - new_data = f2.data.getvalue() - self.assertTrue(osp.isfile(new_path)) - self.assertEqual(osp.splitext(new_path)[1], '.txt') - self.assertEqual(old_path, new_path) - self.assertEqual(old_data, new_data) + with self.admin_access.repo_cnx() as cnx: + f1 = cnx.create_entity('File', data=Binary('some data'), + data_format=u'text/plain', data_name=u'foo.txt') + # NOTE: do not use cw_set() which would automatically + # update f1's local dict. We want the pure rql version to work + cnx.commit() + old_path = self.fspath(cnx, f1) + old_data = f1.data.getvalue() + self.assertTrue(osp.isfile(old_path)) + self.assertEqual(osp.splitext(old_path)[1], '.txt') + cnx.execute('SET F data %(d)s, F data_name %(dn)s, ' + 'F data_format %(df)s WHERE F eid %(f)s', + {'d': Binary('some other data'), + 'f': f1.eid, + 'dn': u'bar.jpg', + 'df': u'image/jpeg'}) + cnx.rollback() + # the new file exists with correct extension + # the old file is dead + f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', + {'f': f1.eid}).get_entity(0, 0) + new_path = self.fspath(cnx, f2) + new_data = f2.data.getvalue() + self.assertTrue(osp.isfile(new_path)) + self.assertEqual(osp.splitext(new_path)[1], '.txt') + self.assertEqual(old_path, new_path) + self.assertEqual(old_data, new_data) @tag('update', 'NULL') def test_bfss_update_to_None(self): - f = self.session.create_entity('Affaire', opt_attr=Binary('toto')) - self.session.commit() - self.session.set_cnxset() - f.cw_set(opt_attr=None) - self.session.commit() + with self.admin_access.repo_cnx() as cnx: + f = cnx.create_entity('Affaire', opt_attr=Binary('toto')) + cnx.commit() + f.cw_set(opt_attr=None) + cnx.commit() @tag('fs_importing', 'update') def test_bfss_update_with_fs_importing(self): - # use self.session to use server-side cache - f1 = self.session.create_entity('File', data=Binary('some data'), - data_format=u'text/plain', data_name=u'foo') - old_fspath = self.fspath(f1) - self.session.transaction_data['fs_importing'] = True - new_fspath = osp.join(self.tempdir, 'newfile.txt') - file(new_fspath, 'w').write('the new data') - self.execute('SET F data %(d)s WHERE F eid %(f)s', - {'d': Binary(new_fspath), 'f': f1.eid}) - self.commit() - self.assertEqual(f1.data.getvalue(), 'the new data') - self.assertEqual(self.fspath(f1), new_fspath) - self.assertFalse(osp.isfile(old_fspath)) + with self.admin_access.repo_cnx() as cnx: + f1 = cnx.create_entity('File', data=Binary('some data'), + data_format=u'text/plain', + data_name=u'foo') + old_fspath = self.fspath(cnx, f1) + cnx.transaction_data['fs_importing'] = True + new_fspath = osp.join(self.tempdir, 'newfile.txt') + file(new_fspath, 'w').write('the new data') + cnx.execute('SET F data %(d)s WHERE F eid %(f)s', + {'d': Binary(new_fspath), 'f': f1.eid}) + cnx.commit() + self.assertEqual(f1.data.getvalue(), 'the new data') + self.assertEqual(self.fspath(cnx, f1), new_fspath) + self.assertFalse(osp.isfile(old_fspath)) @tag('fsimport') def 
test_clean(self): - fsimport = storages.fsimport - td = self.session.transaction_data - self.assertNotIn('fs_importing', td) - with fsimport(self.session): - self.assertIn('fs_importing', td) - self.assertTrue(td['fs_importing']) - self.assertNotIn('fs_importing', td) + with self.admin_access.repo_cnx() as cnx: + fsimport = storages.fsimport + td = cnx.transaction_data + self.assertNotIn('fs_importing', td) + with fsimport(cnx): + self.assertIn('fs_importing', td) + self.assertTrue(td['fs_importing']) + self.assertNotIn('fs_importing', td) @tag('fsimport') def test_true(self): - fsimport = storages.fsimport - td = self.session.transaction_data - td['fs_importing'] = True - with fsimport(self.session): - self.assertIn('fs_importing', td) + with self.admin_access.repo_cnx() as cnx: + fsimport = storages.fsimport + td = cnx.transaction_data + td['fs_importing'] = True + with fsimport(cnx): + self.assertIn('fs_importing', td) + self.assertTrue(td['fs_importing']) self.assertTrue(td['fs_importing']) - self.assertTrue(td['fs_importing']) @tag('fsimport') def test_False(self): - fsimport = storages.fsimport - td = self.session.transaction_data - td['fs_importing'] = False - with fsimport(self.session): - self.assertIn('fs_importing', td) - self.assertTrue(td['fs_importing']) - self.assertFalse(td['fs_importing']) + with self.admin_access.repo_cnx() as cnx: + fsimport = storages.fsimport + td = cnx.transaction_data + td['fs_importing'] = False + with fsimport(cnx): + self.assertIn('fs_importing', td) + self.assertTrue(td['fs_importing']) + self.assertFalse(td['fs_importing']) if __name__ == '__main__': unittest_main() diff -r 84738d495ffd -r 793377697c81 server/test/unittest_undo.py --- a/server/test/unittest_undo.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/test/unittest_undo.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
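The undo tests below rely on the transaction API exposed on client connections: commit()
returns a transaction uuid, and undoable_transactions(), transaction_info() and
undo_transaction() all take or return such uuids. A rough sketch of the round trip, under
the same test-class assumptions as above:

    with self.admin_access.client_cnx() as cnx:
        g = cnx.create_entity('CWGroup', name=u'staff')
        txuuid = cnx.commit()              # commit() returns the transaction uuid
        txs = cnx.undoable_transactions()  # most recent transactions first
        cnx.undo_transaction(txuuid)       # may raise UndoTransactionException
        cnx.commit()                       # a second commit makes the undo effective

Note that undo support must be enabled on the repository; the setUp override below forces
it on by patching Connection.undo_actions.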
@@ -20,47 +20,51 @@ from cubicweb import ValidationError from cubicweb.devtools.testlib import CubicWebTC import cubicweb.server.session -from cubicweb.server.session import Transaction as OldTransaction -from cubicweb.transaction import * +from cubicweb.server.session import Connection as OldConnection from cubicweb.server.sources.native import UndoTransactionException, _UndoException +from cubicweb.transaction import NoSuchTransaction class UndoableTransactionTC(CubicWebTC): def setup_database(self): - req = self.request() - self.toto = self.create_user(req, 'toto', password='toto', groups=('users',), - commit=False) - self.txuuid = self.commit() + with self.admin_access.repo_cnx() as cnx: + self.totoeid = self.create_user(cnx, 'toto', + password='toto', + groups=('users',), + commit=False).eid + self.txuuid = cnx.commit() + + def toto(self, cnx): + return cnx.entity_from_eid(self.totoeid) def setUp(self): - class Transaction(OldTransaction): + class Connection(OldConnection): """Force undo feature to be turned on in all case""" undo_actions = property(lambda tx: True, lambda x, y:None) - cubicweb.server.session.Transaction = Transaction + cubicweb.server.session.Connection = Connection super(UndoableTransactionTC, self).setUp() def tearDown(self): - cubicweb.server.session.Transaction = OldTransaction + cubicweb.server.session.Connection = OldConnection self.restore_connection() - self.session.undo_support = set() super(UndoableTransactionTC, self).tearDown() - def check_transaction_deleted(self, txuuid): + def check_transaction_deleted(self, cnx, txuuid): # also check transaction actions have been properly deleted - cu = self.session.system_sql( + cu = cnx.system_sql( "SELECT * from tx_entity_actions WHERE tx_uuid='%s'" % txuuid) self.assertFalse(cu.fetchall()) - cu = self.session.system_sql( + cu = cnx.system_sql( "SELECT * from tx_relation_actions WHERE tx_uuid='%s'" % txuuid) self.assertFalse(cu.fetchall()) - def assertUndoTransaction(self, txuuid, expected_errors=None): + def assertUndoTransaction(self, cnx, txuuid, expected_errors=None): if expected_errors is None : expected_errors = [] try: - self.cnx.undo_transaction(txuuid) + cnx.undo_transaction(txuuid) except UndoTransactionException as exn: errors = exn.errors else: @@ -70,238 +74,243 @@ def test_undo_api(self): self.assertTrue(self.txuuid) # test transaction api - self.assertRaises(NoSuchTransaction, - self.cnx.transaction_info, 'hop') - self.assertRaises(NoSuchTransaction, - self.cnx.transaction_actions, 'hop') - self.assertRaises(NoSuchTransaction, - self.cnx.undo_transaction, 'hop') - txinfo = self.cnx.transaction_info(self.txuuid) - self.assertTrue(txinfo.datetime) - self.assertEqual(txinfo.user_eid, self.session.user.eid) - self.assertEqual(txinfo.user().login, 'admin') - actions = txinfo.actions_list() - self.assertEqual(len(actions), 2) - actions = txinfo.actions_list(public=False) - self.assertEqual(len(actions), 6) - a1 = actions[0] - self.assertEqual(a1.action, 'C') - self.assertEqual(a1.eid, self.toto.eid) - self.assertEqual(a1.etype,'CWUser') - self.assertEqual(a1.ertype, 'CWUser') - self.assertEqual(a1.changes, None) - self.assertEqual(a1.public, True) - self.assertEqual(a1.order, 1) - a4 = actions[3] - self.assertEqual(a4.action, 'A') - self.assertEqual(a4.rtype, 'in_group') - self.assertEqual(a4.ertype, 'in_group') - self.assertEqual(a4.eid_from, self.toto.eid) - self.assertEqual(a4.eid_to, self.toto.in_group[0].eid) - self.assertEqual(a4.order, 4) - for i, rtype in ((1, 'owned_by'), (2, 'owned_by'), - (4, 
'in_state'), (5, 'created_by')): - a = actions[i] - self.assertEqual(a.action, 'A') - self.assertEqual(a.eid_from, self.toto.eid) - self.assertEqual(a.rtype, rtype) - self.assertEqual(a.order, i+1) - # test undoable_transactions - txs = self.cnx.undoable_transactions() - self.assertEqual(len(txs), 1) - self.assertEqual(txs[0].uuid, self.txuuid) - # test transaction_info / undoable_transactions security - cnx = self.login('anon') - self.assertRaises(NoSuchTransaction, - cnx.transaction_info, self.txuuid) - self.assertRaises(NoSuchTransaction, - cnx.transaction_actions, self.txuuid) - self.assertRaises(NoSuchTransaction, - cnx.undo_transaction, self.txuuid) - txs = cnx.undoable_transactions() - self.assertEqual(len(txs), 0) + with self.admin_access.client_cnx() as cnx: + self.assertRaises(NoSuchTransaction, + cnx.transaction_info, 'hop') + self.assertRaises(NoSuchTransaction, + cnx.transaction_actions, 'hop') + self.assertRaises(NoSuchTransaction, + cnx.undo_transaction, 'hop') + txinfo = cnx.transaction_info(self.txuuid) + self.assertTrue(txinfo.datetime) + self.assertEqual(txinfo.user_eid, cnx.user.eid) + self.assertEqual(txinfo.user().login, 'admin') + actions = txinfo.actions_list() + self.assertEqual(len(actions), 2) + actions = txinfo.actions_list(public=False) + self.assertEqual(len(actions), 6) + a1 = actions[0] + self.assertEqual(a1.action, 'C') + self.assertEqual(a1.eid, self.totoeid) + self.assertEqual(a1.etype,'CWUser') + self.assertEqual(a1.ertype, 'CWUser') + self.assertEqual(a1.changes, None) + self.assertEqual(a1.public, True) + self.assertEqual(a1.order, 1) + a4 = actions[3] + self.assertEqual(a4.action, 'A') + self.assertEqual(a4.rtype, 'in_group') + self.assertEqual(a4.ertype, 'in_group') + self.assertEqual(a4.eid_from, self.totoeid) + self.assertEqual(a4.eid_to, self.toto(cnx).in_group[0].eid) + self.assertEqual(a4.order, 4) + for i, rtype in ((1, 'owned_by'), (2, 'owned_by'), + (4, 'in_state'), (5, 'created_by')): + a = actions[i] + self.assertEqual(a.action, 'A') + self.assertEqual(a.eid_from, self.totoeid) + self.assertEqual(a.rtype, rtype) + self.assertEqual(a.order, i+1) + # test undoable_transactions + txs = cnx.undoable_transactions() + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, self.txuuid) + # test transaction_info / undoable_transactions security + with self.new_access('anon').client_cnx() as cnx: + self.assertRaises(NoSuchTransaction, + cnx.transaction_info, self.txuuid) + self.assertRaises(NoSuchTransaction, + cnx.transaction_actions, self.txuuid) + self.assertRaises(NoSuchTransaction, + cnx.undo_transaction, self.txuuid) + txs = cnx.undoable_transactions() + self.assertEqual(len(txs), 0) def test_undoable_transactions(self): - toto = self.toto - e = self.session.create_entity('EmailAddress', - address=u'toto@logilab.org', - reverse_use_email=toto) - txuuid1 = self.commit() - toto.cw_delete() - txuuid2 = self.commit() - undoable_transactions = self.cnx.undoable_transactions - txs = undoable_transactions(action='D') - self.assertEqual(len(txs), 1, txs) - self.assertEqual(txs[0].uuid, txuuid2) - txs = undoable_transactions(action='C') - self.assertEqual(len(txs), 2, txs) - self.assertEqual(txs[0].uuid, txuuid1) - self.assertEqual(txs[1].uuid, self.txuuid) - txs = undoable_transactions(eid=toto.eid) - self.assertEqual(len(txs), 3) - self.assertEqual(txs[0].uuid, txuuid2) - self.assertEqual(txs[1].uuid, txuuid1) - self.assertEqual(txs[2].uuid, self.txuuid) - txs = undoable_transactions(etype='CWUser') - self.assertEqual(len(txs), 2) - txs = 
undoable_transactions(etype='CWUser', action='C') - self.assertEqual(len(txs), 1) - self.assertEqual(txs[0].uuid, self.txuuid) - txs = undoable_transactions(etype='EmailAddress', action='D') - self.assertEqual(len(txs), 0) - txs = undoable_transactions(etype='EmailAddress', action='D', - public=False) - self.assertEqual(len(txs), 1) - self.assertEqual(txs[0].uuid, txuuid2) - txs = undoable_transactions(eid=toto.eid, action='R', public=False) - self.assertEqual(len(txs), 1) - self.assertEqual(txs[0].uuid, txuuid2) + with self.admin_access.client_cnx() as cnx: + toto = self.toto(cnx) + e = cnx.create_entity('EmailAddress', + address=u'toto@logilab.org', + reverse_use_email=toto) + txuuid1 = cnx.commit() + toto.cw_delete() + txuuid2 = cnx.commit() + undoable_transactions = cnx.undoable_transactions + txs = undoable_transactions(action='D') + self.assertEqual(len(txs), 1, txs) + self.assertEqual(txs[0].uuid, txuuid2) + txs = undoable_transactions(action='C') + self.assertEqual(len(txs), 2, txs) + self.assertEqual(txs[0].uuid, txuuid1) + self.assertEqual(txs[1].uuid, self.txuuid) + txs = undoable_transactions(eid=toto.eid) + self.assertEqual(len(txs), 3) + self.assertEqual(txs[0].uuid, txuuid2) + self.assertEqual(txs[1].uuid, txuuid1) + self.assertEqual(txs[2].uuid, self.txuuid) + txs = undoable_transactions(etype='CWUser') + self.assertEqual(len(txs), 2) + txs = undoable_transactions(etype='CWUser', action='C') + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, self.txuuid) + txs = undoable_transactions(etype='EmailAddress', action='D') + self.assertEqual(len(txs), 0) + txs = undoable_transactions(etype='EmailAddress', action='D', + public=False) + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, txuuid2) + txs = undoable_transactions(eid=toto.eid, action='R', public=False) + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, txuuid2) def test_undo_deletion_base(self): - toto = self.toto - e = self.session.create_entity('EmailAddress', - address=u'toto@logilab.org', - reverse_use_email=toto) - # entity with inlined relation - p = self.session.create_entity('CWProperty', - pkey=u'ui.default-text-format', - value=u'text/rest', - for_user=toto) - self.commit() - txs = self.cnx.undoable_transactions() - self.assertEqual(len(txs), 2) - toto.cw_delete() - txuuid = self.commit() - actions = self.cnx.transaction_info(txuuid).actions_list() - self.assertEqual(len(actions), 1) - toto.cw_clear_all_caches() - e.cw_clear_all_caches() - self.assertUndoTransaction(txuuid) - undotxuuid = self.commit() - self.assertEqual(undotxuuid, None) # undo not undoable - self.assertTrue(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid})) - self.assertTrue(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid})) - self.assertTrue(self.execute('Any X WHERE X has_text "toto@logilab"')) - self.assertEqual(toto.cw_adapt_to('IWorkflowable').state, 'activated') - self.assertEqual(toto.cw_adapt_to('IEmailable').get_email(), 'toto@logilab.org') - self.assertEqual([(p.pkey, p.value) for p in toto.reverse_for_user], - [('ui.default-text-format', 'text/rest')]) - self.assertEqual([g.name for g in toto.in_group], - ['users']) - self.assertEqual([et.name for et in toto.related('is', entities=True)], - ['CWUser']) - self.assertEqual([et.name for et in toto.is_instance_of], - ['CWUser']) - # undoing shouldn't be visble in undoable transaction, and the undone - # transaction should be removed - txs = self.cnx.undoable_transactions() - self.assertEqual(len(txs), 2) - 
self.assertRaises(NoSuchTransaction,
-                          self.cnx.transaction_info, txuuid)
-        self.check_transaction_deleted(txuuid)
-        # the final test: check we can login with the previously deleted user
-        self.login('toto')
+        with self.admin_access.client_cnx() as cnx:
+            toto = self.toto(cnx)
+            e = cnx.create_entity('EmailAddress',
+                                  address=u'toto@logilab.org',
+                                  reverse_use_email=toto)
+            # entity with inlined relation
+            p = cnx.create_entity('CWProperty',
+                                  pkey=u'ui.default-text-format',
+                                  value=u'text/rest',
+                                  for_user=toto)
+            cnx.commit()
+            txs = cnx.undoable_transactions()
+            self.assertEqual(len(txs), 2)
+            toto.cw_delete()
+            txuuid = cnx.commit()
+            actions = cnx.transaction_info(txuuid).actions_list()
+            self.assertEqual(len(actions), 1)
+            toto.cw_clear_all_caches()
+            e.cw_clear_all_caches()
+            self.assertUndoTransaction(cnx, txuuid)
+            undotxuuid = cnx.commit()
+            self.assertEqual(undotxuuid, None) # undo not undoable
+            self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': toto.eid}))
+            self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': e.eid}))
+            self.assertTrue(cnx.execute('Any X WHERE X has_text "toto@logilab"'))
+            self.assertEqual(toto.cw_adapt_to('IWorkflowable').state, 'activated')
+            self.assertEqual(toto.cw_adapt_to('IEmailable').get_email(), 'toto@logilab.org')
+            self.assertEqual([(p.pkey, p.value) for p in toto.reverse_for_user],
+                             [('ui.default-text-format', 'text/rest')])
+            self.assertEqual([g.name for g in toto.in_group],
+                             ['users'])
+            self.assertEqual([et.name for et in toto.related('is', entities=True)],
+                             ['CWUser'])
+            self.assertEqual([et.name for et in toto.is_instance_of],
+                             ['CWUser'])
+            # undoing shouldn't be visible in undoable transactions, and the undone
+            # transaction should be removed
+            txs = cnx.undoable_transactions()
+            self.assertEqual(len(txs), 2)
+            self.assertRaises(NoSuchTransaction,
+                              cnx.transaction_info, txuuid)
+        with self.admin_access.repo_cnx() as cnx:
+            with cnx.ensure_cnx_set:
+                self.check_transaction_deleted(cnx, txuuid)
+        # the final test: check we can login with the previously deleted user
+        with self.new_access('toto').client_cnx():
+            pass
 
     def test_undo_deletion_integrity_1(self):
-        session = self.session
-        # 'Personne fiche Card with' '??' cardinality
-        c = session.create_entity('Card', title=u'hop', content=u'hop')
-        p = session.create_entity('Personne', nom=u'louis', fiche=c)
-        self.commit()
-        c.cw_delete()
-        txuuid = self.commit()
-        c2 = session.create_entity('Card', title=u'hip', content=u'hip')
-        p.cw_set(fiche=c2)
-        self.commit()
-        self.assertUndoTransaction(txuuid, [
-            "Can't restore object relation fiche to entity "
-            "%s which is already linked using this relation." % p.eid])
-        self.commit()
-        p.cw_clear_all_caches()
-        self.assertEqual(p.fiche[0].eid, c2.eid)
+        with self.admin_access.client_cnx() as cnx:
+            # 'Personne fiche Card with' '??' cardinality
+            c = cnx.create_entity('Card', title=u'hop', content=u'hop')
+            p = cnx.create_entity('Personne', nom=u'louis', fiche=c)
+            cnx.commit()
+            c.cw_delete()
+            txuuid = cnx.commit()
+            c2 = cnx.create_entity('Card', title=u'hip', content=u'hip')
+            p.cw_set(fiche=c2)
+            cnx.commit()
+            self.assertUndoTransaction(cnx, txuuid, [
+                "Can't restore object relation fiche to entity "
+                "%s which is already linked using this relation."
% p.eid]) + cnx.commit() + p.cw_clear_all_caches() + self.assertEqual(p.fiche[0].eid, c2.eid) def test_undo_deletion_integrity_2(self): - # test validation error raised if we can't restore a required relation - session = self.session - g = session.create_entity('CWGroup', name=u'staff') - session.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.toto.eid}) - self.toto.cw_set(in_group=g) - self.commit() - self.toto.cw_delete() - txuuid = self.commit() - g.cw_delete() - self.commit() - self.assertUndoTransaction(txuuid, [ - u"Can't restore relation in_group, object entity " - "%s doesn't exist anymore." % g.eid]) - with self.assertRaises(ValidationError) as cm: - self.commit() - cm.exception.translate(unicode) - self.assertEqual(cm.exception.entity, self.toto.eid) - self.assertEqual(cm.exception.errors, - {'in_group-subject': u'at least one relation in_group is ' - 'required on CWUser (%s)' % self.toto.eid}) + with self.admin_access.client_cnx() as cnx: + # test validation error raised if we can't restore a required relation + g = cnx.create_entity('CWGroup', name=u'staff') + cnx.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.totoeid}) + self.toto(cnx).cw_set(in_group=g) + cnx.commit() + self.toto(cnx).cw_delete() + txuuid = cnx.commit() + g.cw_delete() + cnx.commit() + self.assertUndoTransaction(cnx, txuuid, [ + u"Can't restore relation in_group, object entity " + "%s doesn't exist anymore." % g.eid]) + with self.assertRaises(ValidationError) as cm: + cnx.commit() + cm.exception.translate(unicode) + self.assertEqual(cm.exception.entity, self.totoeid) + self.assertEqual(cm.exception.errors, + {'in_group-subject': u'at least one relation in_group is ' + 'required on CWUser (%s)' % self.totoeid}) def test_undo_creation_1(self): - session = self.session - c = session.create_entity('Card', title=u'hop', content=u'hop') - p = session.create_entity('Personne', nom=u'louis', fiche=c) - txuuid = self.commit() - self.assertUndoTransaction(txuuid) - self.commit() - self.assertFalse(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid})) - self.assertFalse(self.execute('Any X WHERE X eid %(x)s', {'x': p.eid})) - self.assertFalse(self.execute('Any X,Y WHERE X fiche Y')) - self.session.set_cnxset() - for eid in (p.eid, c.eid): - self.assertFalse(session.system_sql( - 'SELECT * FROM entities WHERE eid=%s' % eid).fetchall()) - self.assertFalse(session.system_sql( - 'SELECT 1 FROM owned_by_relation WHERE eid_from=%s' % eid).fetchall()) - # added by sql in hooks (except when using dataimport) - self.assertFalse(session.system_sql( - 'SELECT 1 FROM is_relation WHERE eid_from=%s' % eid).fetchall()) - self.assertFalse(session.system_sql( - 'SELECT 1 FROM is_instance_of_relation WHERE eid_from=%s' % eid).fetchall()) - self.check_transaction_deleted(txuuid) - + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + p = cnx.create_entity('Personne', nom=u'louis', fiche=c) + txuuid = cnx.commit() + self.assertUndoTransaction(cnx, txuuid) + cnx.commit() + self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': c.eid})) + self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': p.eid})) + self.assertFalse(cnx.execute('Any X,Y WHERE X fiche Y')) + with self.admin_access.repo_cnx() as cnx: + with cnx.ensure_cnx_set: + for eid in (p.eid, c.eid): + self.assertFalse(cnx.system_sql( + 'SELECT * FROM entities WHERE eid=%s' % eid).fetchall()) + self.assertFalse(cnx.system_sql( + 'SELECT 1 FROM owned_by_relation WHERE eid_from=%s' % 
eid).fetchall()) + # added by sql in hooks (except when using dataimport) + self.assertFalse(cnx.system_sql( + 'SELECT 1 FROM is_relation WHERE eid_from=%s' % eid).fetchall()) + self.assertFalse(cnx.system_sql( + 'SELECT 1 FROM is_instance_of_relation WHERE eid_from=%s' % eid).fetchall()) + self.check_transaction_deleted(cnx, txuuid) def test_undo_creation_integrity_1(self): - session = self.session - req = self.request() - tutu = self.create_user(req, 'tutu', commit=False) - txuuid = self.commit() - email = self.request().create_entity('EmailAddress', address=u'tutu@cubicweb.org') - prop = self.request().create_entity('CWProperty', pkey=u'ui.default-text-format', - value=u'text/html') - tutu.cw_set(use_email=email, reverse_for_user=prop) - self.commit() - with self.assertRaises(ValidationError) as cm: - self.cnx.undo_transaction(txuuid) - self.assertEqual(cm.exception.entity, tutu.eid) - self.assertEqual(cm.exception.errors, - {None: 'some later transaction(s) touch entity, undo them first'}) + with self.admin_access.client_cnx() as cnx: + tutu = self.create_user(cnx, 'tutu', commit=False) + txuuid = cnx.commit() + email = cnx.create_entity('EmailAddress', address=u'tutu@cubicweb.org') + prop = cnx.create_entity('CWProperty', pkey=u'ui.default-text-format', + value=u'text/html') + tutu.cw_set(use_email=email, reverse_for_user=prop) + cnx.commit() + with self.assertRaises(ValidationError) as cm: + cnx.undo_transaction(txuuid) + self.assertEqual(cm.exception.entity, tutu.eid) + self.assertEqual(cm.exception.errors, + {None: 'some later transaction(s) touch entity, undo them first'}) def test_undo_creation_integrity_2(self): - session = self.session - g = session.create_entity('CWGroup', name=u'staff') - txuuid = self.commit() - session.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.toto.eid}) - self.toto.cw_set(in_group=g) - self.commit() - with self.assertRaises(ValidationError) as cm: - self.cnx.undo_transaction(txuuid) - self.assertEqual(cm.exception.entity, g.eid) - self.assertEqual(cm.exception.errors, - {None: 'some later transaction(s) touch entity, undo them first'}) + with self.admin_access.client_cnx() as cnx: + g = cnx.create_entity('CWGroup', name=u'staff') + txuuid = cnx.commit() + cnx.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.totoeid}) + self.toto(cnx).cw_set(in_group=g) + cnx.commit() + with self.assertRaises(ValidationError) as cm: + cnx.undo_transaction(txuuid) + self.assertEqual(cm.exception.entity, g.eid) + self.assertEqual(cm.exception.errors, + {None: 'some later transaction(s) touch entity, undo them first'}) # self.assertEqual(errors, # [u"Can't restore relation in_group, object entity " # "%s doesn't exist anymore." % g.eid]) - # with self.assertRaises(ValidationError) as cm: self.commit() - # self.assertEqual(cm.exception.entity, self.toto.eid) + # with self.assertRaises(ValidationError) as cm: cnx.commit() + # self.assertEqual(cm.exception.entity, self.totoeid) # self.assertEqual(cm.exception.errors, # {'in_group-subject': u'at least one relation in_group is ' - # 'required on CWUser (%s)' % self.toto.eid}) + # 'required on CWUser (%s)' % self.totoeid}) # test implicit 'replacement' of an inlined relation @@ -309,124 +318,124 @@ """Undo remove relation Personne (?) fiche (?) 
Card NB: processed by `_undo_r` as expected""" - session = self.session - c = session.create_entity('Card', title=u'hop', content=u'hop') - p = session.create_entity('Personne', nom=u'louis', fiche=c) - self.commit() - p.cw_set(fiche=None) - txuuid = self.commit() - self.assertUndoTransaction(txuuid) - self.commit() - p.cw_clear_all_caches() - self.assertEqual(p.fiche[0].eid, c.eid) + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + p = cnx.create_entity('Personne', nom=u'louis', fiche=c) + cnx.commit() + p.cw_set(fiche=None) + txuuid = cnx.commit() + self.assertUndoTransaction(cnx, txuuid) + cnx.commit() + p.cw_clear_all_caches() + self.assertEqual(p.fiche[0].eid, c.eid) def test_undo_inline_rel_remove_ko(self): """Restore an inlined relation to a deleted entity, with an error. NB: processed by `_undo_r` as expected""" - session = self.session - c = session.create_entity('Card', title=u'hop', content=u'hop') - p = session.create_entity('Personne', nom=u'louis', fiche=c) - self.commit() - p.cw_set(fiche=None) - txuuid = self.commit() - c.cw_delete() - self.commit() - self.assertUndoTransaction(txuuid, [ - "Can't restore relation fiche, object entity %d doesn't exist anymore." % c.eid]) - self.commit() - p.cw_clear_all_caches() - self.assertFalse(p.fiche) - self.assertIsNone(session.system_sql( - 'SELECT cw_fiche FROM cw_Personne WHERE cw_eid=%s' % p.eid).fetchall()[0][0]) + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + p = cnx.create_entity('Personne', nom=u'louis', fiche=c) + cnx.commit() + p.cw_set(fiche=None) + txuuid = cnx.commit() + c.cw_delete() + cnx.commit() + self.assertUndoTransaction(cnx, txuuid, [ + "Can't restore relation fiche, object entity %d doesn't exist anymore." % c.eid]) + cnx.commit() + p.cw_clear_all_caches() + self.assertFalse(p.fiche) + with self.admin_access.repo_cnx() as cnx: + with cnx.ensure_cnx_set: + self.assertIsNone(cnx.system_sql( + 'SELECT cw_fiche FROM cw_Personne WHERE cw_eid=%s' % p.eid).fetchall()[0][0]) def test_undo_inline_rel_add_ok(self): """Undo add relation Personne (?) fiche (?) Card Caution processed by `_undo_u`, not `_undo_a` !""" - session = self.session - c = session.create_entity('Card', title=u'hop', content=u'hop') - p = session.create_entity('Personne', nom=u'louis') - self.commit() - p.cw_set(fiche=c) - txuuid = self.commit() - self.assertUndoTransaction(txuuid) - self.commit() - p.cw_clear_all_caches() - self.assertFalse(p.fiche) + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + p = cnx.create_entity('Personne', nom=u'louis') + cnx.commit() + p.cw_set(fiche=c) + txuuid = cnx.commit() + self.assertUndoTransaction(cnx, txuuid) + cnx.commit() + p.cw_clear_all_caches() + self.assertFalse(p.fiche) def test_undo_inline_rel_add_ko(self): """Undo add relation Personne (?) fiche (?) 
Card Caution processed by `_undo_u`, not `_undo_a` !""" - session = self.session - c = session.create_entity('Card', title=u'hop', content=u'hop') - p = session.create_entity('Personne', nom=u'louis') - self.commit() - p.cw_set(fiche=c) - txuuid = self.commit() - c.cw_delete() - self.commit() - self.assertUndoTransaction(txuuid) + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + p = cnx.create_entity('Personne', nom=u'louis') + cnx.commit() + p.cw_set(fiche=c) + txuuid = cnx.commit() + c.cw_delete() + cnx.commit() + self.assertUndoTransaction(cnx, txuuid) def test_undo_inline_rel_replace_ok(self): """Undo changing relation Personne (?) fiche (?) Card Caution processed by `_undo_u` """ - session = self.session - c1 = session.create_entity('Card', title=u'hop', content=u'hop') - c2 = session.create_entity('Card', title=u'hip', content=u'hip') - p = session.create_entity('Personne', nom=u'louis', fiche=c1) - self.commit() - p.cw_set(fiche=c2) - txuuid = self.commit() - self.assertUndoTransaction(txuuid) - self.commit() - p.cw_clear_all_caches() - self.assertEqual(p.fiche[0].eid, c1.eid) + with self.admin_access.client_cnx() as cnx: + c1 = cnx.create_entity('Card', title=u'hop', content=u'hop') + c2 = cnx.create_entity('Card', title=u'hip', content=u'hip') + p = cnx.create_entity('Personne', nom=u'louis', fiche=c1) + cnx.commit() + p.cw_set(fiche=c2) + txuuid = cnx.commit() + self.assertUndoTransaction(cnx, txuuid) + cnx.commit() + p.cw_clear_all_caches() + self.assertEqual(p.fiche[0].eid, c1.eid) def test_undo_inline_rel_replace_ko(self): """Undo changing relation Personne (?) fiche (?) Card, with an error Caution processed by `_undo_u` """ - session = self.session - c1 = session.create_entity('Card', title=u'hop', content=u'hop') - c2 = session.create_entity('Card', title=u'hip', content=u'hip') - p = session.create_entity('Personne', nom=u'louis', fiche=c1) - self.commit() - p.cw_set(fiche=c2) - txuuid = self.commit() - c1.cw_delete() - self.commit() - self.assertUndoTransaction(txuuid, [ - "can't restore entity %s of type Personne, target of fiche (eid %s)" - " does not exist any longer" % (p.eid, c1.eid)]) - self.commit() - p.cw_clear_all_caches() - self.assertFalse(p.fiche) + with self.admin_access.client_cnx() as cnx: + c1 = cnx.create_entity('Card', title=u'hop', content=u'hop') + c2 = cnx.create_entity('Card', title=u'hip', content=u'hip') + p = cnx.create_entity('Personne', nom=u'louis', fiche=c1) + cnx.commit() + p.cw_set(fiche=c2) + txuuid = cnx.commit() + c1.cw_delete() + cnx.commit() + self.assertUndoTransaction(cnx, txuuid, [ + "can't restore entity %s of type Personne, target of fiche (eid %s)" + " does not exist any longer" % (p.eid, c1.eid)]) + cnx.commit() + p.cw_clear_all_caches() + self.assertFalse(p.fiche) def test_undo_attr_update_ok(self): - session = self.session - p = session.create_entity('Personne', nom=u'toto') - session.commit() - self.session.set_cnxset() - p.cw_set(nom=u'titi') - txuuid = self.commit() - self.assertUndoTransaction(txuuid) - p.cw_clear_all_caches() - self.assertEqual(p.nom, u'toto') + with self.admin_access.client_cnx() as cnx: + p = cnx.create_entity('Personne', nom=u'toto') + cnx.commit() + p.cw_set(nom=u'titi') + txuuid = cnx.commit() + self.assertUndoTransaction(cnx, txuuid) + p.cw_clear_all_caches() + self.assertEqual(p.nom, u'toto') def test_undo_attr_update_ko(self): - session = self.session - p = session.create_entity('Personne', nom=u'toto') - session.commit() - 
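# [editorial sketch, not part of the changeset] The old-style body deleted
# just below shows the boilerplate this series removes: after a commit, a
# session had to call set_cnxset() before touching the database again. The
# new connection objects manage their connections set themselves:
def test_attr_update_sketch(self):      # hypothetical new-style equivalent
    with self.admin_access.client_cnx() as cnx:
        p = cnx.create_entity('Personne', nom=u'toto')
        cnx.commit()             # no cnx.set_cnxset() needed afterwards
        p.cw_set(nom=u'titi')
        cnx.commit()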
self.session.set_cnxset() - p.cw_set(nom=u'titi') - txuuid = self.commit() - p.cw_delete() - self.commit() - self.assertUndoTransaction(txuuid, [ - u"can't restore state of entity %s, it has been deleted inbetween" % p.eid]) + with self.admin_access.client_cnx() as cnx: + p = cnx.create_entity('Personne', nom=u'toto') + cnx.commit() + p.cw_set(nom=u'titi') + txuuid = cnx.commit() + p.cw_delete() + cnx.commit() + self.assertUndoTransaction(cnx, txuuid, [ + u"can't restore state of entity %s, it has been deleted inbetween" % p.eid]) class UndoExceptionInUnicode(CubicWebTC): diff -r 84738d495ffd -r 793377697c81 server/utils.py --- a/server/utils.py Wed Sep 24 17:35:59 2014 +0200 +++ b/server/utils.py Wed Sep 24 18:04:30 2014 +0200 @@ -72,32 +72,14 @@ # wrong password return '' -def cartesian_product(seqin): - """returns a generator which returns the cartesian product of `seqin` - for more details, see : - http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/302478 - """ - def rloop(seqin, comb): - """recursive looping function""" - if seqin: # any more sequences to process? - for item in seqin[0]: - newcomb = comb + [item] # add next item to current combination - # call rloop w/ remaining seqs, newcomb - for item in rloop(seqin[1:], newcomb): - yield item # seqs and newcomb - else: # processing last sequence - yield comb # comb finished, add to list - return rloop(seqin, []) - - -def eschema_eid(session, eschema): +def eschema_eid(cnx, eschema): """get eid of the CWEType entity for the given yams type. You should use this because when schema has been loaded from the file-system, not from the database, (e.g. during tests), eschema.eid is not set. """ if eschema.eid is None: - eschema.eid = session.execute( + eschema.eid = cnx.execute( 'Any X WHERE X is CWEType, X name %(name)s', {'name': str(eschema)})[0][0] return eschema.eid @@ -126,7 +108,7 @@ return user, passwd -_MARKER=object() +_MARKER = object() def func_name(func): name = getattr(func, '__name__', _MARKER) if name is _MARKER: diff -r 84738d495ffd -r 793377697c81 sobjects/cwxmlparser.py --- a/sobjects/cwxmlparser.py Wed Sep 24 17:35:59 2014 +0200 +++ b/sobjects/cwxmlparser.py Wed Sep 24 18:04:30 2014 +0200 @@ -31,8 +31,7 @@ """ -from datetime import datetime, timedelta, time -from urllib import urlencode +from datetime import datetime, time from cgi import parse_qs # in urlparse with python >= 2.6 from logilab.common.date import todate, totime @@ -57,7 +56,7 @@ DEFAULT_CONVERTERS['Date'] = convert_date def convert_datetime(ustr): if '.' 
in ustr: # assume %Y-%m-%d %H:%M:%S.mmmmmm - ustr = ustr.split('.',1)[0] + ustr = ustr.split('.', 1)[0] return datetime.strptime(ustr, '%Y-%m-%d %H:%M:%S') DEFAULT_CONVERTERS['Datetime'] = convert_datetime # XXX handle timezone, though this will be enough as TZDatetime are @@ -169,7 +168,7 @@ ttype = schemacfg.schema.stype.name etyperules = self.source.mapping.setdefault(etype, {}) etyperules.setdefault((rtype, role, action), []).append( - (ttype, options) ) + (ttype, options)) self.source.mapping_idx[schemacfg.eid] = ( etype, rtype, role, action, ttype) @@ -204,7 +203,7 @@ * `rels` is for relations and structured as {role: {relation: [(related item, related rels)...]} """ - entity = self.extid2entity(str(item['cwuri']), item['cwtype'], + entity = self.extid2entity(str(item['cwuri']), item['cwtype'], cwsource=item['cwsource'], item=item) if entity is None: return None @@ -432,7 +431,7 @@ self._related_link(ttype, others, searchattrs) def _related_link(self, ttype, others, searchattrs): - def issubset(x,y): + def issubset(x, y): return all(z in y for z in x) eids = [] # local eids log = self.parser.import_log @@ -468,7 +467,7 @@ self._clear_relation((ttype,)) def _find_entities(self, item, kwargs): - return tuple(self._cw.find_entities(item['cwtype'], **kwargs)) + return tuple(self._cw.find(item['cwtype'], **kwargs).entities()) class CWEntityXMLActionLinkInState(CWEntityXMLActionLink): diff -r 84738d495ffd -r 793377697c81 sobjects/ldapparser.py --- a/sobjects/ldapparser.py Wed Sep 24 17:35:59 2014 +0200 +++ b/sobjects/ldapparser.py Wed Sep 24 18:04:30 2014 +0200 @@ -92,9 +92,9 @@ for groupdict in self.group_source_entities_by_extid.itervalues(): self._process('CWGroup', groupdict) - def handle_deletion(self, config, session, myuris): + def handle_deletion(self, config, cnx, myuris): if config['delete-entities']: - super(DataFeedLDAPAdapter, self).handle_deletion(config, session, myuris) + super(DataFeedLDAPAdapter, self).handle_deletion(config, cnx, myuris) return if myuris: byetype = {} @@ -107,9 +107,9 @@ continue self.info('deactivate %s %s entities', len(eids), etype) for eid in eids: - wf = session.entity_from_eid(eid).cw_adapt_to('IWorkflowable') + wf = cnx.entity_from_eid(eid).cw_adapt_to('IWorkflowable') wf.fire_transition_if_possible('deactivate') - session.commit(free_cnxset=False) + cnx.commit() def update_if_necessary(self, entity, attrs): # disable read security to allow password selection @@ -142,9 +142,11 @@ try: tdict[tattr] = sdict[sattr] except KeyError: - raise ConfigurationError('source attribute %s is not present ' - 'in the source, please check the ' - '%s-attrs-map field' % + raise ConfigurationError('source attribute %s has not ' + 'been found in the source, ' + 'please check the %s-attrs-map ' + 'field and the permissions of ' + 'the LDAP binding user' % (sattr, etype[2:].lower())) return tdict @@ -168,7 +170,7 @@ etype = entity.cw_etype if etype == 'EmailAddress': return - # all CWUsers must be treated before CWGroups to have to in_group relation + # all CWUsers must be treated before CWGroups to have the in_group relation # set correctly in _associate_ldapusers elif etype == 'CWUser': groups = filter(None, [self._get_group(name) @@ -196,7 +198,7 @@ if not isinstance(emailaddrs, list): emailaddrs = [emailaddrs] for emailaddr in emailaddrs: - # search for existant email first, may be coming from another source + # search for existing email first, may be coming from another source rset = self._cw.execute('EmailAddress X WHERE X address %(addr)s', {'addr': emailaddr}) 
if not rset: diff -r 84738d495ffd -r 793377697c81 sobjects/notification.py --- a/sobjects/notification.py Wed Sep 24 17:35:59 2014 +0200 +++ b/sobjects/notification.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -30,7 +30,7 @@ from cubicweb.view import Component, EntityView from cubicweb.server.hook import SendMailOp from cubicweb.mail import construct_message_id, format_mail -from cubicweb.server.session import Session +from cubicweb.server.session import Session, InternalManager class RecipientsFinder(Component): @@ -115,56 +115,51 @@ msgid = None req = self._cw self.user_data = req.user_data() - origlang = req.lang for something in recipients: - if isinstance(something, Entity): - # hi-jack self._cw to get a session for the returned user - self._cw = Session(something, self._cw.repo) - self._cw.set_cnxset() + if isinstance(something, tuple): + emailaddr, lang = something + user = InternalManager(lang=lang) + else: emailaddr = something.cw_adapt_to('IEmailable').get_email() - else: - emailaddr, lang = something - self._cw.set_language(lang) - # since the same view (eg self) may be called multiple time and we - # need a fresh stream at each iteration, reset it explicitly - self.w = None - try: + user = something + # hi-jack self._cw to get a session for the returned user + session = Session(user, self._cw.repo) + with session.new_cnx() as cnx: + self._cw = cnx try: - # XXX forcing the row & col here may make the content and - # subject inconsistent because subject will depend on - # self.cw_row & self.cw_col if they are set. - content = self.render(row=0, col=0, **kwargs) - subject = self.subject() - except SkipEmail: - continue - except Exception as ex: - # shouldn't make the whole transaction fail because of rendering - # error (unauthorized or such) XXX check it doesn't actually - # occurs due to rollback on such error - self.exception(str(ex)) - continue - msg = format_mail(self.user_data, [emailaddr], content, subject, - config=self._cw.vreg.config, msgid=msgid, references=refs) - yield [emailaddr], msg - except: - if isinstance(something, Entity): - self._cw.rollback() - raise - else: - if isinstance(something, Entity): - self._cw.commit() - finally: - if isinstance(something, Entity): - self._cw.close() + # since the same view (eg self) may be called multiple time and we + # need a fresh stream at each iteration, reset it explicitly + self.w = None + try: + # XXX forcing the row & col here may make the content and + # subject inconsistent because subject will depend on + # self.cw_row & self.cw_col if they are set. + content = self.render(row=0, col=0, **kwargs) + subject = self.subject() + except SkipEmail: + continue + except Exception as ex: + # shouldn't make the whole transaction fail because of rendering + # error (unauthorized or such) XXX check it doesn't actually + # occurs due to rollback on such error + self.exception(str(ex)) + continue + msg = format_mail(self.user_data, [emailaddr], content, subject, + config=self._cw.vreg.config, msgid=msgid, references=refs) + yield [emailaddr], msg + finally: + # ensure we have a cnxset since commit will fail if there is + # some operation but no cnxset. 
This may occurs in this very + # specific case (eg SendMailOp) + with cnx.ensure_cnx_set: + cnx.commit() self._cw = req - # restore language - req.set_language(origlang) # recipients / email sending ############################################### def recipients(self): """return a list of either 2-uple (email, language) or user entity to - who this email should be sent + whom this email should be sent """ finder = self._cw.vreg['components'].select( 'recipients_finder', self._cw, rset=self.cw_rset, diff -r 84738d495ffd -r 793377697c81 sobjects/services.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/sobjects/services.py Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,158 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Define server side service provided by cubicweb""" + +import threading + +from yams.schema import role_name +from cubicweb import ValidationError +from cubicweb.server import Service +from cubicweb.predicates import match_user_groups, match_kwargs + +class StatsService(Service): + """Return a dictionary containing some statistics about the repository + resources usage. + """ + + __regid__ = 'repo_stats' + __select__ = match_user_groups('managers') + + def call(self): + repo = self._cw.repo # Service are repo side only. + results = {} + querier = repo.querier + source = repo.system_source + for size, maxsize, hits, misses, title in ( + (len(querier._rql_cache), repo.config['rql-cache-size'], + querier.cache_hit, querier.cache_miss, 'rqlt_st'), + (len(source._cache), repo.config['rql-cache-size'], + source.cache_hit, source.cache_miss, 'sql'), + ): + results['%s_cache_size' % title] = '%s / %s' % (size, maxsize) + results['%s_cache_hit' % title] = hits + results['%s_cache_miss' % title] = misses + results['%s_cache_hit_percent' % title] = (hits * 100) / (hits + misses) + results['type_source_cache_size'] = len(repo._type_source_cache) + results['extid_cache_size'] = len(repo._extid_cache) + results['sql_no_cache'] = repo.system_source.no_cache + results['nb_open_sessions'] = len(repo._sessions) + results['nb_active_threads'] = threading.activeCount() + looping_tasks = repo._tasks_manager._looping_tasks + results['looping_tasks'] = ', '.join(str(t) for t in looping_tasks) + results['available_cnxsets'] = repo._cnxsets_pool.qsize() + results['threads'] = ', '.join(sorted(str(t) for t in threading.enumerate())) + return results + +class GcStatsService(Service): + """Return a dictionary containing some statistics about the repository + resources usage. + """ + + __regid__ = 'repo_gc_stats' + __select__ = match_user_groups('managers') + + def call(self, nmax=20): + """Return a dictionary containing some statistics about the repository + memory usage. + + This is a public method, not requiring a session id. 
+ + nmax is the max number of (most) referenced object returned as + the 'referenced' result + """ + + from cubicweb._gcdebug import gc_info + from cubicweb.appobject import AppObject + from cubicweb.rset import ResultSet + from cubicweb.dbapi import Connection, Cursor + from cubicweb.web.request import CubicWebRequestBase + from rql.stmts import Union + + lookupclasses = (AppObject, + Union, ResultSet, + Connection, Cursor, + CubicWebRequestBase) + try: + from cubicweb.server.session import Session, InternalSession + lookupclasses += (InternalSession, Session) + except ImportError: + pass # no server part installed + + results = {} + counters, ocounters, garbage = gc_info(lookupclasses, + viewreferrersclasses=()) + values = sorted(counters.iteritems(), key=lambda x: x[1], reverse=True) + results['lookupclasses'] = values + values = sorted(ocounters.iteritems(), key=lambda x: x[1], reverse=True)[:nmax] + results['referenced'] = values + results['unreachable'] = len(garbage) + return results + + +class RegisterUserService(Service): + """check if a user with the given login exists, if not create it with the + given password. This service is designed to be used for anonymous + registration on public web sites. + + To use it, do: + with self.appli.repo.internal_cnx() as cnx: + cnx.call_service('register_user', + login=login, + password=password, + **cwuserkwargs) + """ + __regid__ = 'register_user' + __select__ = Service.__select__ & match_kwargs('login', 'password') + default_groups = ('users',) + + def call(self, login, password, email=None, groups=None, **cwuserkwargs): + cnx = self._cw + errmsg = cnx._('the value "%s" is already used, use another one') + + if (cnx.execute('CWUser X WHERE X login %(login)s', {'login': login}, + build_descr=False) + or cnx.execute('CWUser X WHERE X use_email C, C address %(login)s', + {'login': login}, build_descr=False)): + qname = role_name('login', 'subject') + raise ValidationError(None, {qname: errmsg % login}) + + if isinstance(password, unicode): + # password should *always* be utf8 encoded + password = password.encode('UTF8') + cwuserkwargs['login'] = login + cwuserkwargs['upassword'] = password + # we have to create the user + user = cnx.create_entity('CWUser', **cwuserkwargs) + if groups is None: + groups = self.default_groups + assert groups, "CWUsers must belong to at least one CWGroup" + group_names = ', '.join('%r' % group for group in groups) + cnx.execute('SET X in_group G WHERE X eid %%(x)s, G name IN (%s)' % group_names, + {'x': user.eid}) + + if email or '@' in login: + d = {'login': login, 'email': email or login} + if cnx.execute('EmailAddress X WHERE X address %(email)s', d, + build_descr=False): + qname = role_name('address', 'subject') + raise ValidationError(None, {qname: errmsg % d['email']}) + cnx.execute('INSERT EmailAddress X: X address %(email)s, ' + 'U primary_email X, U use_email X ' + 'WHERE U login %(login)s', d, build_descr=False) + + return user diff -r 84738d495ffd -r 793377697c81 sobjects/supervising.py --- a/sobjects/supervising.py Wed Sep 24 17:35:59 2014 +0200 +++ b/sobjects/supervising.py Wed Sep 24 18:04:30 2014 +0200 @@ -142,16 +142,16 @@ self.w(u' %s' % entity.absolute_url()) def _relation_context(self, changedescr): - session = self._cw + cnx = self._cw def describe(eid): try: - return session._(session.describe(eid)[0]).lower() + return cnx._(cnx.entity_metas(eid)['type']).lower() except UnknownEid: # may occurs when an entity has been deleted from an external # source and we're cleaning its relation - return 
session._('unknown external entity') + return cnx._('unknown external entity') eidfrom, rtype, eidto = changedescr.eidfrom, changedescr.rtype, changedescr.eidto - return {'rtype': session._(rtype), + return {'rtype': cnx._(rtype), 'eidfrom': eidfrom, 'frometype': describe(eidfrom), 'eidto': eidto, @@ -171,16 +171,15 @@ of changes """ def _get_view(self): - return self.session.vreg['components'].select('supervision_notif', - self.session) + return self.cnx.vreg['components'].select('supervision_notif', self.cnx) def _prepare_email(self): - session = self.session - config = session.vreg.config + cnx = self.cnx + config = cnx.vreg.config uinfo = {'email': config['sender-addr'], 'name': config['sender-name']} view = self._get_view() - content = view.render(changes=session.transaction_data.get('pendingchanges')) + content = view.render(changes=cnx.transaction_data.get('pendingchanges')) recipients = view.recipients() msg = format_mail(uinfo, recipients, content, view.subject(), config=config) self.to_send = [(msg, recipients)] diff -r 84738d495ffd -r 793377697c81 sobjects/test/unittest_cwxmlparser.py --- a/sobjects/test/unittest_cwxmlparser.py Wed Sep 24 17:35:59 2014 +0200 +++ b/sobjects/test/unittest_cwxmlparser.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2011-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2011-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -132,13 +132,14 @@ REMOVE THE DATABASE TEMPLATE else it won't be considered """ test_db_id = 'xmlparser' + @classmethod - def pre_setup_database(cls, session, config): - myfeed = session.create_entity('CWSource', name=u'myfeed', type=u'datafeed', + def pre_setup_database(cls, cnx, config): + myfeed = cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed', parser=u'cw.entityxml', url=BASEXML) - myotherfeed = session.create_entity('CWSource', name=u'myotherfeed', type=u'datafeed', - parser=u'cw.entityxml', url=OTHERXML) - session.commit() + myotherfeed = cnx.create_entity('CWSource', name=u'myotherfeed', type=u'datafeed', + parser=u'cw.entityxml', url=OTHERXML) + cnx.commit() myfeed.init_mapping([(('CWUser', 'use_email', '*'), u'role=subject\naction=copy'), (('CWUser', 'in_group', '*'), @@ -153,21 +154,23 @@ (('CWUser', 'in_state', '*'), u'role=subject\naction=link\nlinkattr=name'), ]) - session.create_entity('Tag', name=u'hop') + cnx.create_entity('Tag', name=u'hop') + cnx.commit() def test_complete_url(self): dfsource = self.repo.sources_by_uri['myfeed'] - parser = dfsource._get_parser(self.session) - self.assertEqual(parser.complete_url('http://www.cubicweb.org/CWUser'), - 'http://www.cubicweb.org/CWUser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') - self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser'), - 'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') - self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'), - 'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf') - self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&vid=rdf&relation=hop'), - 'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=rdf') - 
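# [editorial sketch, not part of the changeset] Behaviour pinned by the
# complete_url() assertions around here: for entity-type URLs the parser
# appends one relation=<rtype>-<role> query argument per relation mapped on
# the source, while a URL already carrying an explicit rql= query keeps only
# its own relation arguments:
url = parser.complete_url('http://www.cubicweb.org/CWUser')
# -> '.../CWUser?relation=tags-object&relation=in_group-subject&...'
url = parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop')
# -> unchanged apart from argument ordering: rql= URLs bypass the mapping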
self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop'), - 'http://www.cubicweb.org/?rql=cwuser&relation=hop') + with self.admin_access.repo_cnx() as cnx: + parser = dfsource._get_parser(cnx) + self.assertEqual(parser.complete_url('http://www.cubicweb.org/CWUser'), + 'http://www.cubicweb.org/CWUser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') + self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser'), + 'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') + self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'), + 'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf') + self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&vid=rdf&relation=hop'), + 'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=rdf') + self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop'), + 'http://www.cubicweb.org/?rql=cwuser&relation=hop') def test_actions(self): @@ -192,113 +195,105 @@ (u'Tag', {u'linkattr': u'name'})], }, }) - session = self.repo.internal_session(safe=True) - stats = dfsource.pull_data(session, force=True, raise_on_error=True) - self.assertEqual(sorted(stats), ['checked', 'created', 'updated']) - self.assertEqual(len(stats['created']), 2) - self.assertEqual(stats['updated'], set()) + with self.repo.internal_cnx() as cnx: + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + self.assertEqual(sorted(stats), ['checked', 'created', 'updated']) + self.assertEqual(len(stats['created']), 2) + self.assertEqual(stats['updated'], set()) - user = self.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0) - self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59)) - self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06)) - self.assertEqual(user.cwuri, 'http://pouet.org/5') - self.assertEqual(user.cw_source[0].name, 'myfeed') - self.assertEqual(user.absolute_url(), 'http://pouet.org/5') - self.assertEqual(len(user.use_email), 1) - # copy action - email = user.use_email[0] - self.assertEqual(email.address, 'syt@logilab.fr') - self.assertEqual(email.cwuri, 'http://pouet.org/6') - self.assertEqual(email.absolute_url(), 'http://pouet.org/6') - self.assertEqual(email.cw_source[0].name, 'myfeed') - self.assertEqual(len(email.reverse_tags), 1) - self.assertEqual(email.reverse_tags[0].name, 'hop') - # link action - self.assertFalse(self.execute('CWGroup X WHERE X name "unknown"')) - groups = sorted([g.name for g in user.in_group]) - self.assertEqual(groups, ['users']) - group = user.in_group[0] - self.assertEqual(len(group.reverse_tags), 1) - self.assertEqual(group.reverse_tags[0].name, 'hop') - # link or create action - tags = set([(t.name, t.cwuri.replace(str(t.eid), ''), t.cw_source[0].name) - for t in user.reverse_tags]) - self.assertEqual(tags, set((('hop', 'http://testing.fr/cubicweb/', 'system'), - ('unknown', 'http://testing.fr/cubicweb/', 'system'))) - ) - session.set_cnxset() - with session.security_enabled(read=False): # avoid Unauthorized due to password selection - stats = dfsource.pull_data(session, force=True, raise_on_error=True) - self.assertEqual(stats['created'], set()) - self.assertEqual(len(stats['updated']), 0) - self.assertEqual(len(stats['checked']), 2) - 
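# [editorial sketch, not part of the changeset] The recurring rewrite in this
# test file: repo.internal_session(safe=True), session.set_cnxset() and the
# security_enabled(read=False) wrappers all collapse into one context
# manager, since internal connections already run with security disabled:
with self.repo.internal_cnx() as cnx:
    stats = dfsource.pull_data(cnx, force=True, raise_on_error=True)
# pull_data() returns a dict mapping 'created', 'updated' and 'checked'
# to sets of affected eids, which is what the assertions above rely on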
self.repo._type_source_cache.clear() - self.repo._extid_cache.clear() - session.set_cnxset() - with session.security_enabled(read=False): # avoid Unauthorized due to password selection - stats = dfsource.pull_data(session, force=True, raise_on_error=True) - self.assertEqual(stats['created'], set()) - self.assertEqual(len(stats['updated']), 0) - self.assertEqual(len(stats['checked']), 2) - session.commit() + with self.admin_access.web_request() as req: + user = req.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0) + self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59)) + self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06)) + self.assertEqual(user.cwuri, 'http://pouet.org/5') + self.assertEqual(user.cw_source[0].name, 'myfeed') + self.assertEqual(user.absolute_url(), 'http://pouet.org/5') + self.assertEqual(len(user.use_email), 1) + # copy action + email = user.use_email[0] + self.assertEqual(email.address, 'syt@logilab.fr') + self.assertEqual(email.cwuri, 'http://pouet.org/6') + self.assertEqual(email.absolute_url(), 'http://pouet.org/6') + self.assertEqual(email.cw_source[0].name, 'myfeed') + self.assertEqual(len(email.reverse_tags), 1) + self.assertEqual(email.reverse_tags[0].name, 'hop') + # link action + self.assertFalse(req.execute('CWGroup X WHERE X name "unknown"')) + groups = sorted([g.name for g in user.in_group]) + self.assertEqual(groups, ['users']) + group = user.in_group[0] + self.assertEqual(len(group.reverse_tags), 1) + self.assertEqual(group.reverse_tags[0].name, 'hop') + # link or create action + tags = set([(t.name, t.cwuri.replace(str(t.eid), ''), t.cw_source[0].name) + for t in user.reverse_tags]) + self.assertEqual(tags, set((('hop', 'http://testing.fr/cubicweb/', 'system'), + ('unknown', 'http://testing.fr/cubicweb/', 'system'))) + ) + with self.repo.internal_cnx() as cnx: + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + self.assertEqual(stats['created'], set()) + self.assertEqual(len(stats['updated']), 0) + self.assertEqual(len(stats['checked']), 2) + self.repo._type_source_cache.clear() + self.repo._extid_cache.clear() + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + self.assertEqual(stats['created'], set()) + self.assertEqual(len(stats['updated']), 0) + self.assertEqual(len(stats['checked']), 2) - # test move to system source - self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': email.eid}) - self.commit() - rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"') - self.assertEqual(len(rset), 1) - e = rset.get_entity(0, 0) - self.assertEqual(e.eid, email.eid) - self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system', - 'use-cwuri-as-url': False}, - 'type': 'EmailAddress', - 'extid': None}) - self.assertEqual(e.cw_source[0].name, 'system') - self.assertEqual(e.reverse_use_email[0].login, 'sthenault') - self.commit() - # test everything is still fine after source synchronization - session.set_cnxset() - with session.security_enabled(read=False): # avoid Unauthorized due to password selection - stats = dfsource.pull_data(session, force=True, raise_on_error=True) - rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"') - self.assertEqual(len(rset), 1) - e = rset.get_entity(0, 0) - self.assertEqual(e.eid, email.eid) - self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system', - 'use-cwuri-as-url': False}, - 'type': 'EmailAddress', - 'extid': None}) - 
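# [editorial sketch, not part of the changeset] The scenario being ported
# here: an entity imported through a datafeed source is handed over to the
# system source with a plain RQL SET, after which it behaves natively:
cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"',
            {'x': email.eid})
cnx.commit()
# cw_metainformation() then reports the 'system' source and a None extid,
# and later pulls of the datafeed source leave the entity alone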
self.assertEqual(e.cw_source[0].name, 'system') - self.assertEqual(e.reverse_use_email[0].login, 'sthenault') - session.commit() + # test move to system source + cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': email.eid}) + cnx.commit() + rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"') + self.assertEqual(len(rset), 1) + e = rset.get_entity(0, 0) + self.assertEqual(e.eid, email.eid) + self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system', + 'use-cwuri-as-url': False}, + 'type': 'EmailAddress', + 'extid': None}) + self.assertEqual(e.cw_source[0].name, 'system') + self.assertEqual(e.reverse_use_email[0].login, 'sthenault') + # test everything is still fine after source synchronization + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"') + self.assertEqual(len(rset), 1) + e = rset.get_entity(0, 0) + self.assertEqual(e.eid, email.eid) + self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system', + 'use-cwuri-as-url': False}, + 'type': 'EmailAddress', + 'extid': None}) + self.assertEqual(e.cw_source[0].name, 'system') + self.assertEqual(e.reverse_use_email[0].login, 'sthenault') + cnx.commit() - # test delete entity - e.cw_delete() - self.commit() - # test everything is still fine after source synchronization - session.set_cnxset() - with session.security_enabled(read=False): # avoid Unauthorized due to password selection - stats = dfsource.pull_data(session, force=True, raise_on_error=True) - rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"') - self.assertEqual(len(rset), 0) - rset = self.sexecute('Any X WHERE X use_email E, X login "sthenault"') - self.assertEqual(len(rset), 0) + # test delete entity + e.cw_delete() + cnx.commit() + # test everything is still fine after source synchronization + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"') + self.assertEqual(len(rset), 0) + rset = cnx.execute('Any X WHERE X use_email E, X login "sthenault"') + self.assertEqual(len(rset), 0) def test_external_entity(self): dfsource = self.repo.sources_by_uri['myotherfeed'] - session = self.repo.internal_session(safe=True) - stats = dfsource.pull_data(session, force=True, raise_on_error=True) - user = self.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0) - self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59)) - self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06)) - self.assertEqual(user.cwuri, 'http://pouet.org/5') - self.assertEqual(user.cw_source[0].name, 'myfeed') + with self.repo.internal_cnx() as cnx: + stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) + user = cnx.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0) + self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59)) + self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06)) + self.assertEqual(user.cwuri, 'http://pouet.org/5') + self.assertEqual(user.cw_source[0].name, 'myfeed') def test_noerror_missing_fti_attribute(self): dfsource = self.repo.sources_by_uri['myfeed'] - session = self.repo.internal_session(safe=True) - parser = dfsource._get_parser(session) - dfsource.process_urls(parser, [''' + with self.repo.internal_cnx() as cnx: + parser = dfsource._get_parser(cnx) + dfsource.process_urls(parser, [''' how-to @@ -308,9 
+303,9 @@ def test_noerror_unspecified_date(self): dfsource = self.repo.sources_by_uri['myfeed'] - session = self.repo.internal_session(safe=True) - parser = dfsource._get_parser(session) - dfsource.process_urls(parser, [''' + with self.repo.internal_cnx() as cnx: + parser = dfsource._get_parser(cnx) + dfsource.process_urls(parser, [''' how-to diff -r 84738d495ffd -r 793377697c81 sobjects/test/unittest_email.py --- a/sobjects/test/unittest_email.py Wed Sep 24 17:35:59 2014 +0200 +++ b/sobjects/test/unittest_email.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -15,9 +15,6 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -""" - -""" from cubicweb import Unauthorized from cubicweb.devtools.testlib import CubicWebTC @@ -25,45 +22,46 @@ class EmailAddressHooksTC(CubicWebTC): def test_use_email_set_primary_email(self): - self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U use_email X WHERE U login "admin"') - self.assertEqual(self.execute('Any A WHERE U primary_email X, U login "admin", X address A').rows, - []) - self.commit() - self.assertEqual(self.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], - 'admin@logilab.fr') - # having another email should'nt change anything - self.execute('INSERT EmailAddress X: X address "a@logilab.fr", U use_email X WHERE U login "admin"') - self.commit() - self.assertEqual(self.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], - 'admin@logilab.fr') + with self.admin_access.client_cnx() as cnx: + cnx.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U use_email X WHERE U login "admin"') + self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A').rows, + []) + cnx.commit() + self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], + 'admin@logilab.fr') + # having another email shouldn't change anything + cnx.execute('INSERT EmailAddress X: X address "a@logilab.fr", U use_email X WHERE U login "admin"') + cnx.commit() + self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], + 'admin@logilab.fr') def test_primary_email_set_use_email(self): - self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X WHERE U login "admin"') - self.assertEqual(self.execute('Any A WHERE U use_email X, U login "admin", X address A').rows, - []) - self.commit() - self.assertEqual(self.execute('Any A WHERE U use_email X, U login "admin", X address A')[0][0], - 'admin@logilab.fr') + with self.admin_access.client_cnx() as cnx: + cnx.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X WHERE U login "admin"') + self.assertEqual(cnx.execute('Any A WHERE U use_email X, U login "admin", X address A').rows, + []) + cnx.commit() + self.assertEqual(cnx.execute('Any A WHERE U use_email X, U login "admin", X address A')[0][0], + 'admin@logilab.fr') def test_cardinality_check(self): - email1 = self.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0] - self.commit() - self.execute('SET U primary_email E WHERE U login "anon", E address "client@client.com"') - self.commit() - rset =
self.execute('Any X WHERE X use_email E, E eid %(e)s', {'e': email1}) - self.assertFalse(rset.rowcount != 1, rset) + with self.admin_access.client_cnx() as cnx: + email1 = cnx.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0] + cnx.commit() + cnx.execute('SET U primary_email E WHERE U login "anon", E address "client@client.com"') + cnx.commit() + rset = cnx.execute('Any X WHERE X use_email E, E eid %(e)s', {'e': email1}) + self.assertFalse(rset.rowcount != 1, rset) def test_security_check(self): - req = self.request() - self.create_user(req, 'toto') - email1 = self.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0] - self.commit() - cnx = self.login('toto') - cu = cnx.cursor() - self.assertRaises(Unauthorized, - cu.execute, 'SET U primary_email E WHERE E eid %(e)s, U login "toto"', - {'e': email1}) - cnx.close() + with self.admin_access.client_cnx() as cnx: + self.create_user(cnx, 'toto') + email1 = cnx.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0] + cnx.commit() + with self.new_access('toto').client_cnx() as cnx: + self.assertRaises(Unauthorized, + cnx.execute, 'SET U primary_email E WHERE E eid %(e)s, U login "toto"', + {'e': email1}) if __name__ == '__main__': from logilab.common.testlib import unittest_main diff -r 84738d495ffd -r 793377697c81 sobjects/test/unittest_notification.py --- a/sobjects/test/unittest_notification.py Wed Sep 24 17:35:59 2014 +0200 +++ b/sobjects/test/unittest_notification.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,5 +1,5 @@ # -*- coding: iso-8859-1 -*- -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -16,9 +16,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
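# [editorial sketch, not part of the changeset] test_security_check above
# also illustrates the replacement for self.login('toto'): rather than
# switching a global connection, each user gets its own access object:
with self.new_access('toto').client_cnx() as cnx:
    # runs with toto's permissions; a forbidden update raises Unauthorized
    cnx.execute('Any X WHERE X is CWUser')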
-""" -""" from socket import gethostname from logilab.common.testlib import unittest_main, TestCase @@ -31,16 +29,16 @@ msgid1 = construct_message_id('testapp', 21) msgid2 = construct_message_id('testapp', 21) self.assertNotEqual(msgid1, msgid2) - self.assertFalse('&' in msgid1) - self.assertFalse('=' in msgid1) - self.assertFalse('/' in msgid1) - self.assertFalse('+' in msgid1) + self.assertNotIn('&', msgid1) + self.assertNotIn('=', msgid1) + self.assertNotIn('/', msgid1) + self.assertNotIn('+', msgid1) values = parse_message_id(msgid1, 'testapp') self.assertTrue(values) # parse_message_id should work with or without surrounding <> self.assertEqual(values, parse_message_id(msgid1[1:-1], 'testapp')) self.assertEqual(values['eid'], '21') - self.assertTrue('timestamp' in values) + self.assertIn('timestamp', values) self.assertEqual(parse_message_id(msgid1[1:-1], 'anotherapp'), None) def test_notimestamp(self): @@ -63,33 +61,34 @@ class NotificationTC(CubicWebTC): def test_recipients_finder(self): - urset = self.execute('CWUser X WHERE X login "admin"') - self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X ' - 'WHERE U eid %(x)s', {'x': urset[0][0]}) - self.execute('INSERT CWProperty X: X pkey "ui.language", X value "fr", X for_user U ' - 'WHERE U eid %(x)s', {'x': urset[0][0]}) - self.commit() # commit so that admin get its properties updated - finder = self.vreg['components'].select('recipients_finder', - self.request(), rset=urset) - self.set_option('default-recipients-mode', 'none') - self.assertEqual(finder.recipients(), []) - self.set_option('default-recipients-mode', 'users') - self.assertEqual(finder.recipients(), [(u'admin@logilab.fr', 'fr')]) - self.set_option('default-recipients-mode', 'default-dest-addrs') - self.set_option('default-dest-addrs', 'abcd@logilab.fr, efgh@logilab.fr') - self.assertEqual(finder.recipients(), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')]) + with self.admin_access.web_request() as req: + urset = req.execute('CWUser X WHERE X login "admin"') + req.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X ' + 'WHERE U eid %(x)s', {'x': urset[0][0]}) + req.execute('INSERT CWProperty X: X pkey "ui.language", X value "fr", X for_user U ' + 'WHERE U eid %(x)s', {'x': urset[0][0]}) + req.cnx.commit() # commit so that admin get its properties updated + finder = self.vreg['components'].select('recipients_finder', + req, rset=urset) + self.set_option('default-recipients-mode', 'none') + self.assertEqual(finder.recipients(), []) + self.set_option('default-recipients-mode', 'users') + self.assertEqual(finder.recipients(), [(u'admin@logilab.fr', 'fr')]) + self.set_option('default-recipients-mode', 'default-dest-addrs') + self.set_option('default-dest-addrs', 'abcd@logilab.fr, efgh@logilab.fr') + self.assertEqual(finder.recipients(), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')]) def test_status_change_view(self): - req = self.request() - u = self.create_user(req, 'toto') - iwfable = u.cw_adapt_to('IWorkflowable') - iwfable.fire_transition('deactivate', comment=u'yeah') - self.assertFalse(MAILBOX) - self.commit() - self.assertEqual(len(MAILBOX), 1) - email = MAILBOX[0] - self.assertEqual(email.content, - ''' + with self.admin_access.web_request() as req: + u = self.create_user(req, 'toto') + iwfable = u.cw_adapt_to('IWorkflowable') + iwfable.fire_transition('deactivate', comment=u'yeah') + self.assertFalse(MAILBOX) + req.cnx.commit() + self.assertEqual(len(MAILBOX), 1) + email = MAILBOX[0] + 
self.assertEqual(email.content, + ''' admin changed status from to for entity 'toto' @@ -97,8 +96,8 @@ url: http://testing.fr/cubicweb/cwuser/toto ''') - self.assertEqual(email.subject, - 'status changed CWUser #%s (admin)' % u.eid) + self.assertEqual(email.subject, + 'status changed CWUser #%s (admin)' % u.eid) if __name__ == '__main__': unittest_main() diff -r 84738d495ffd -r 793377697c81 sobjects/test/unittest_register_user.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/sobjects/test/unittest_register_user.py Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,95 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unittest for cubicweb.dbapi""" + +from cubicweb import ValidationError +from cubicweb.web import Unauthorized +from cubicweb.devtools.testlib import CubicWebTC + + +class RegisterUserTC(CubicWebTC): + + def test_register_user_service(self): + acc = self.admin_access + with acc.client_cnx() as cnx: + cnx.call_service('register_user', login=u'foo1', password=u'bar1', + email=u'foo1@bar1.com', firstname=u'Foo1', + surname=u'Bar1') + + acc = self.new_access('anon') + with acc.client_cnx() as cnx: + self.assertRaises(Unauthorized, cnx.call_service, 'register_user', + login=u'foo2', password=u'bar2', + email=u'foo2@bar2.com', firstname=u'Foo2', surname=u'Bar2') + + with self.repo.internal_cnx() as cnx: + cnx.call_service('register_user', login=u'foo3', + password=u'bar3', email=u'foo3@bar3.com', + firstname=u'Foo3', surname=u'Bar3') + # same login + with self.assertRaises(ValidationError): + cnx.call_service('register_user', login=u'foo3', + password=u'bar3') + + def test_register_user_attributes(self): + with self.repo.internal_cnx() as cnx: + cnx.call_service('register_user', login=u'foo3', + password=u'bar3', email=u'foo3@bar3.com', + firstname=u'Foo3', surname=u'Bar3') + cnx.commit() + + with self.admin_access.client_cnx() as cnx: + user = cnx.find('CWUser', login=u'foo3').one() + self.assertEqual(user.firstname, u'Foo3') + self.assertEqual(user.use_email[0].address, u'foo3@bar3.com') + + def test_register_user_groups(self): + with self.repo.internal_cnx() as cnx: + # default + cnx.call_service('register_user', login=u'foo_user', + password=u'bar_user', email=u'foo_user@bar_user.com', + firstname=u'Foo_user', surname=u'Bar_user') + + # group kwarg + cnx.call_service('register_user', login=u'foo_admin', + password=u'bar_admin', email=u'foo_admin@bar_admin.com', + firstname=u'Foo_admin', surname=u'Bar_admin', + groups=('managers', 'users')) + + # class attribute + from cubicweb.sobjects import services + services.RegisterUserService.default_groups = ('guests',) + cnx.call_service('register_user', login=u'foo_guest', + password=u'bar_guest', email=u'foo_guest@bar_guest.com', + firstname=u'Foo_guest', surname=u'Bar_guest') + cnx.commit() + + 
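# [editorial sketch, not part of the changeset] These tests exercise the new
# Service layer introduced with services.py: a repository-side class selected
# by __regid__ plus predicates, callable from any connection. A minimal
# custom service (names hypothetical):
from cubicweb.server import Service
from cubicweb.predicates import match_user_groups

class PingService(Service):
    __regid__ = 'ping'
    __select__ = Service.__select__ & match_user_groups('managers')

    def call(self, word=u'pong'):
        return word

# called from a test or client code:
#     with self.admin_access.client_cnx() as cnx:
#         assert cnx.call_service('ping') == u'pong'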
with self.admin_access.client_cnx() as cnx: + user = cnx.find('CWUser', login=u'foo_user').one() + self.assertEqual([g.name for g in user.in_group], ['users']) + + admin = cnx.find('CWUser', login=u'foo_admin').one() + self.assertEqual(sorted(g.name for g in admin.in_group), ['managers', 'users']) + + guest = cnx.find('CWUser', login=u'foo_guest').one() + self.assertEqual([g.name for g in guest.in_group], ['guests']) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 84738d495ffd -r 793377697c81 sobjects/test/unittest_supervising.py --- a/sobjects/test/unittest_supervising.py Wed Sep 24 17:35:59 2014 +0200 +++ b/sobjects/test/unittest_supervising.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,5 +1,5 @@ # -*- coding: iso-8859-1 -*- -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -16,9 +16,6 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" - -""" import re from logilab.common.testlib import unittest_main @@ -30,38 +27,38 @@ class SupervisingTC(CubicWebTC): def setup_database(self): - req = self.request() - req.create_entity('Card', title=u"une news !", content=u"cubicweb c'est beau") - req.create_entity('Card', title=u"une autre news !", content=u"cubicweb c'est beau") - req.create_entity('Bookmark', title=u"un signet !", path=u"view?vid=index") - req.create_entity('Comment', content=u"Yo !") - self.execute('SET C comments B WHERE B title "une autre news !", C content "Yo !"') + with self.admin_access.client_cnx() as cnx: + cnx.create_entity('Card', title=u"une news !", content=u"cubicweb c'est beau") + card = cnx.create_entity('Card', title=u"une autre news !", content=u"cubicweb c'est beau") + cnx.create_entity('Bookmark', title=u"un signet !", path=u"view?vid=index") + cnx.create_entity('Comment', content=u"Yo !", comments=card) + cnx.commit() self.vreg.config.global_set_option('supervising-addrs', 'test@logilab.fr') def test_supervision(self): # do some modification - user = self.execute('INSERT CWUser X: X login "toto", X upassword "sosafe", X in_group G ' - 'WHERE G name "users"').get_entity(0, 0) - self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid}) - self.execute('DELETE Card B WHERE B title "une news !"') - self.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid}) - self.execute('SET X content "duh?" 
WHERE X is Comment') - self.execute('DELETE X comments Y WHERE Y is Card, Y title "une autre news !"') - # check only one supervision email operation - session = self.session - sentops = [op for op in session.pending_operations - if isinstance(op, SupervisionMailOp)] - self.assertEqual(len(sentops), 1) - # check view content - op = sentops[0] - view = sentops[0]._get_view() - self.assertEqual(view.recipients(), ['test@logilab.fr']) - self.assertEqual(view.subject(), '[data supervision] changes summary') - data = view.render(changes=session.transaction_data.get('pendingchanges')).strip() - data = re.sub('#\d+', '#EID', data) - data = re.sub('/\d+', '/EID', data) - self.assertMultiLineEqual('''user admin has made the following change(s): + with self.admin_access.repo_cnx() as cnx: + user = cnx.execute('INSERT CWUser X: X login "toto", X upassword "sosafe", X in_group G ' + 'WHERE G name "users"').get_entity(0, 0) + cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid}) + cnx.execute('DELETE Card B WHERE B title "une news !"') + cnx.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid}) + cnx.execute('SET X content "duh?" WHERE X is Comment') + cnx.execute('DELETE Comment C WHERE C comments Y, Y is Card, Y title "une autre news !"') + # check only one supervision email operation + sentops = [op for op in cnx.pending_operations + if isinstance(op, SupervisionMailOp)] + self.assertEqual(len(sentops), 1) + # check view content + op = sentops[0] + view = sentops[0]._get_view() + self.assertEqual(view.recipients(), ['test@logilab.fr']) + self.assertEqual(view.subject(), '[data supervision] changes summary') + data = view.render(changes=cnx.transaction_data.get('pendingchanges')).strip() + data = re.sub('#\d+', '#EID', data) + data = re.sub('/\d+', '/EID', data) + self.assertMultiLineEqual('''user admin has made the following change(s): * added cwuser #EID (toto) http://testing.fr/cubicweb/cwuser/toto @@ -75,26 +72,26 @@ * updated comment #EID (duh?) 
http://testing.fr/cubicweb/comment/EID -* deleted relation comments from comment #EID to card #EID''', +* deleted comment #EID (duh?)''', data) - # check prepared email - op._prepare_email() - self.assertEqual(len(op.to_send), 1) - self.assert_(op.to_send[0][0]) - self.assertEqual(op.to_send[0][1], ['test@logilab.fr']) - self.commit() - # some other changes ####### - user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') - sentops = [op for op in session.pending_operations - if isinstance(op, SupervisionMailOp)] - self.assertEqual(len(sentops), 1) - # check view content - op = sentops[0] - view = sentops[0]._get_view() - data = view.render(changes=session.transaction_data.get('pendingchanges')).strip() - data = re.sub('#\d+', '#EID', data) - data = re.sub('/\d+', '/EID', data) - self.assertMultiLineEqual('''user admin has made the following change(s): + # check prepared email + op._prepare_email() + self.assertEqual(len(op.to_send), 1) + self.assert_(op.to_send[0][0]) + self.assertEqual(op.to_send[0][1], ['test@logilab.fr']) + cnx.commit() + # some other changes ####### + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') + sentops = [op for op in cnx.pending_operations + if isinstance(op, SupervisionMailOp)] + self.assertEqual(len(sentops), 1) + # check view content + op = sentops[0] + view = sentops[0]._get_view() + data = view.render(changes=cnx.transaction_data.get('pendingchanges')).strip() + data = re.sub('#\d+', '#EID', data) + data = re.sub('/\d+', '/EID', data) + self.assertMultiLineEqual('''user admin has made the following change(s): * changed state of cwuser #EID (toto) from state activated to state deactivated @@ -102,10 +99,10 @@ data) def test_nonregr1(self): - session = self.session - # do some unlogged modification - self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid}) - self.commit() # no crash + with self.admin_access.repo_cnx() as cnx: + # do some unlogged modification + cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': cnx.user.eid}) + cnx.commit() # no crash if __name__ == '__main__': diff -r 84738d495ffd -r 793377697c81 test/data/schema.py --- a/test/data/schema.py Wed Sep 24 17:35:59 2014 +0200 +++ b/test/data/schema.py Wed Sep 24 18:04:30 2014 +0200 @@ -89,3 +89,8 @@ class StateFull(WorkflowableEntityType): name = String() + + +class Reference(EntityType): + nom = String(unique=True) + ean = String(unique=True, required=True) diff -r 84738d495ffd -r 793377697c81 test/unittest_cwconfig.py --- a/test/unittest_cwconfig.py Wed Sep 24 17:35:59 2014 +0200 +++ b/test/unittest_cwconfig.py Wed Sep 24 18:04:30 2014 +0200 @@ -123,7 +123,7 @@ self.assertEqual(self.config.cubes_search_path(), [CUSTOM_CUBES_DIR, self.config.CUBES_DIR]) - self.assertTrue('mycube' in self.config.available_cubes()) + self.assertIn('mycube', self.config.available_cubes()) # test cubes python path self.config.adjust_sys_path() import cubes diff -r 84738d495ffd -r 793377697c81 test/unittest_cwctl.py --- a/test/unittest_cwctl.py Wed Sep 24 17:35:59 2014 +0200 +++ b/test/unittest_cwctl.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
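# Editor's aside (hedged sketch, not part of this changeset): the pattern
# applied throughout these test files is the 3.19 access-object migration --
# the implicit self.request()/self.session/self.execute() helpers give way
# to explicit, scoped connections.  Schematically, inside any CubicWebTC
# subclass (the test method and entity values here are hypothetical):

def test_something(self):
    with self.admin_access.repo_cnx() as cnx:      # server-side connection
        cnx.execute('Any X WHERE X is CWUser')     # was: self.execute(...)
        cnx.commit()                               # was: self.commit()
    with self.admin_access.web_request() as req:   # web-side request
        req.create_entity('Bookmark', title=u'un signet !',
                          path=u'view?vid=index')
        req.cnx.commit()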
@@ -43,21 +43,22 @@ class CubicWebShellTC(CubicWebTC): def test_process_script_args_context(self): - repo = self.cnx._repo - mih = ServerMigrationHelper(None, repo=repo, cnx=self.cnx, - interactive=False, - # hack so it don't try to load fs schema - schema=1) - scripts = {'script1.py': list(), - 'script2.py': ['-v'], - 'script3.py': ['-vd', '-f', 'FILE.TXT'], - } - mih.cmd_process_script(join(self.datadir, 'scripts', 'script1.py'), - funcname=None) - for script, args in scripts.items(): - scriptname = os.path.join(self.datadir, 'scripts', script) - self.assert_(os.path.exists(scriptname)) - mih.cmd_process_script(scriptname, None, scriptargs=args) + repo = self.repo + with self.admin_access.client_cnx() as cnx: + mih = ServerMigrationHelper(None, repo=repo, cnx=cnx, + interactive=False, + # hack so it don't try to load fs schema + schema=1) + scripts = {'script1.py': list(), + 'script2.py': ['-v'], + 'script3.py': ['-vd', '-f', 'FILE.TXT'], + } + mih.cmd_process_script(join(self.datadir, 'scripts', 'script1.py'), + funcname=None) + for script, args in scripts.items(): + scriptname = os.path.join(self.datadir, 'scripts', script) + self.assert_(os.path.exists(scriptname)) + mih.cmd_process_script(scriptname, None, scriptargs=args) if __name__ == '__main__': diff -r 84738d495ffd -r 793377697c81 test/unittest_dbapi.py --- a/test/unittest_dbapi.py Wed Sep 24 17:35:59 2014 +0200 +++ b/test/unittest_dbapi.py Wed Sep 24 18:04:30 2014 +0200 @@ -22,41 +22,43 @@ from logilab.common import tempattr from cubicweb import ConnectionError, cwconfig, NoSelectableObject -from cubicweb.dbapi import ProgrammingError +from cubicweb.dbapi import ProgrammingError, _repo_connect from cubicweb.devtools.testlib import CubicWebTC class DBAPITC(CubicWebTC): def test_public_repo_api(self): - cnx = self.login('anon') + cnx = _repo_connect(self.repo, login='anon', password='anon') self.assertEqual(cnx.get_schema(), self.repo.schema) self.assertEqual(cnx.source_defs(), {'system': {'type': 'native', 'uri': 'system', 'use-cwuri-as-url': False}}) - self.restore_connection() # proper way to close cnx + cnx.close() self.assertRaises(ProgrammingError, cnx.get_schema) self.assertRaises(ProgrammingError, cnx.source_defs) def test_db_api(self): - cnx = self.login('anon') + cnx = _repo_connect(self.repo, login='anon', password='anon') self.assertEqual(cnx.rollback(), None) self.assertEqual(cnx.commit(), None) - self.restore_connection() # proper way to close cnx - #self.assertEqual(cnx.close(), None) + cnx.close() self.assertRaises(ProgrammingError, cnx.rollback) self.assertRaises(ProgrammingError, cnx.commit) self.assertRaises(ProgrammingError, cnx.close) def test_api(self): - cnx = self.login('anon') + cnx = _repo_connect(self.repo, login='anon', password='anon') self.assertEqual(cnx.user(None).login, 'anon') + self.assertEqual({'type': u'CWSource', 'source': u'system', 'extid': None}, + cnx.entity_metas(1)) self.assertEqual(cnx.describe(1), (u'CWSource', u'system', None)) - self.restore_connection() # proper way to close cnx + cnx.close() self.assertRaises(ProgrammingError, cnx.user, None) + self.assertRaises(ProgrammingError, cnx.entity_metas, 1) self.assertRaises(ProgrammingError, cnx.describe, 1) def test_shared_data_api(self): - cnx = self.login('anon') + cnx = _repo_connect(self.repo, login='anon', password='anon') self.assertEqual(cnx.get_shared_data('data'), None) cnx.set_shared_data('data', 4) self.assertEqual(cnx.get_shared_data('data'), 4) @@ -65,16 +67,17 @@ self.assertEqual(cnx.get_shared_data('data'), None) 
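# Editor's aside (hedged sketch, not part of this changeset): every test in
# this dbapi file now follows the same lifecycle -- _repo_connect() replaces
# the removed self.login()/self.restore_connection() helpers, and once
# close() has been called any further use of the connection raises
# ProgrammingError.  Assuming a repository object `repo` with an 'anon'
# account, as in the tests above:

from cubicweb.dbapi import ProgrammingError, _repo_connect

cnx = _repo_connect(repo, login='anon', password='anon')
try:
    cnx.commit()              # returns None, like rollback()
finally:
    cnx.close()               # the one legitimate way to end the connection
try:
    cnx.rollback()            # any call after close() ...
except ProgrammingError:      # ... fails loudly instead of half-working
    pass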
cnx.set_shared_data('data', 4) self.assertEqual(cnx.get_shared_data('data'), 4) - self.restore_connection() # proper way to close cnx + cnx.close() self.assertRaises(ProgrammingError, cnx.check) self.assertRaises(ProgrammingError, cnx.set_shared_data, 'data', 0) self.assertRaises(ProgrammingError, cnx.get_shared_data, 'data') def test_web_compatible_request(self): config = cwconfig.CubicWebNoAppConfiguration() - with tempattr(self.cnx.vreg, 'config', config): - self.cnx.use_web_compatible_requests('http://perdu.com') - req = self.cnx.request() + cnx = _repo_connect(self.repo, login='admin', password='gingkow') + with tempattr(cnx.vreg, 'config', config): + cnx.use_web_compatible_requests('http://perdu.com') + req = cnx.request() self.assertEqual(req.base_url(), 'http://perdu.com') self.assertEqual(req.from_controller(), 'view') self.assertEqual(req.relative_path(), '') diff -r 84738d495ffd -r 793377697c81 test/unittest_entity.py --- a/test/unittest_entity.py Wed Sep 24 17:35:59 2014 +0200 +++ b/test/unittest_entity.py Wed Sep 24 18:04:30 2014 +0200 @@ -23,13 +23,13 @@ from logilab.common import tempattr from logilab.common.decorators import clear_cache -from cubicweb import Binary, Unauthorized +from cubicweb import Binary from cubicweb.devtools.testlib import CubicWebTC from cubicweb.mttransforms import HAS_TAL from cubicweb.entity import can_use_rest_path from cubicweb.entities import fetch_config from cubicweb.uilib import soup2xhtml -from cubicweb.schema import RQLVocabularyConstraint, RRQLExpression +from cubicweb.schema import RRQLExpression class EntityTC(CubicWebTC): @@ -45,163 +45,166 @@ cls.fetch_attrs, cls.cw_fetch_order = self.backup_dict[cls] def test_no_prefill_related_cache_bug(self): - session = self.session - usine = session.create_entity('Usine', lieu=u'Montbeliard') - produit = session.create_entity('Produit') - # usine was prefilled in glob_add_entity - # let's simulate produit creation without prefill - produit._cw_related_cache.clear() - # use add_relations - session.add_relations([('fabrique_par', [(produit.eid, usine.eid)])]) - self.assertEqual(1, len(usine.reverse_fabrique_par)) - self.assertEqual(1, len(produit.fabrique_par)) + with self.admin_access.repo_cnx() as cnx: + usine = cnx.create_entity('Usine', lieu=u'Montbeliard') + produit = cnx.create_entity('Produit') + # usine was prefilled in glob_add_entity + # let's simulate produit creation without prefill + produit._cw_related_cache.clear() + # use add_relations + cnx.add_relations([('fabrique_par', [(produit.eid, usine.eid)])]) + self.assertEqual(1, len(usine.reverse_fabrique_par)) + self.assertEqual(1, len(produit.fabrique_par)) def test_boolean_value(self): - e = self.vreg['etypes'].etype_class('CWUser')(self.request()) - self.assertTrue(e) + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('CWUser')(req) + self.assertTrue(e) def test_yams_inheritance(self): from entities import Note - e = self.vreg['etypes'].etype_class('SubNote')(self.request()) - self.assertIsInstance(e, Note) - e2 = self.vreg['etypes'].etype_class('SubNote')(self.request()) - self.assertIs(e.__class__, e2.__class__) + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('SubNote')(req) + self.assertIsInstance(e, Note) + e2 = self.vreg['etypes'].etype_class('SubNote')(req) + self.assertIs(e.__class__, e2.__class__) def test_has_eid(self): - e = self.vreg['etypes'].etype_class('CWUser')(self.request()) - self.assertEqual(e.eid, None) - self.assertEqual(e.has_eid(), False) - 
e.eid = 'X' - self.assertEqual(e.has_eid(), False) - e.eid = 0 - self.assertEqual(e.has_eid(), True) - e.eid = 2 - self.assertEqual(e.has_eid(), True) + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('CWUser')(req) + self.assertEqual(e.eid, None) + self.assertEqual(e.has_eid(), False) + e.eid = 'X' + self.assertEqual(e.has_eid(), False) + e.eid = 0 + self.assertEqual(e.has_eid(), True) + e.eid = 2 + self.assertEqual(e.has_eid(), True) def test_copy(self): - req = self.request() - req.create_entity('Tag', name=u'x') - p = req.create_entity('Personne', nom=u'toto') - oe = req.create_entity('Note', type=u'x') - self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', - {'t': oe.eid, 'u': p.eid}) - self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}) - e = req.create_entity('Note', type=u'z') - e.copy_relations(oe.eid) - self.assertEqual(len(e.ecrit_par), 1) - self.assertEqual(e.ecrit_par[0].eid, p.eid) - self.assertEqual(len(e.reverse_tags), 1) - # check meta-relations are not copied, set on commit - self.assertEqual(len(e.created_by), 0) + with self.admin_access.web_request() as req: + req.create_entity('Tag', name=u'x') + p = req.create_entity('Personne', nom=u'toto') + oe = req.create_entity('Note', type=u'x') + req.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', + {'t': oe.eid, 'u': p.eid}) + req.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}) + e = req.create_entity('Note', type=u'z') + e.copy_relations(oe.eid) + self.assertEqual(len(e.ecrit_par), 1) + self.assertEqual(e.ecrit_par[0].eid, p.eid) + self.assertEqual(len(e.reverse_tags), 1) + # check meta-relations are not copied, set on commit + self.assertEqual(len(e.created_by), 0) def test_copy_with_nonmeta_composite_inlined(self): - req = self.request() - p = req.create_entity('Personne', nom=u'toto') - oe = req.create_entity('Note', type=u'x') - self.schema['ecrit_par'].rdef('Note', 'Personne').composite = 'subject' - self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', - {'t': oe.eid, 'u': p.eid}) - e = req.create_entity('Note', type=u'z') - e.copy_relations(oe.eid) - self.assertFalse(e.ecrit_par) - self.assertTrue(oe.ecrit_par) + with self.admin_access.web_request() as req: + p = req.create_entity('Personne', nom=u'toto') + oe = req.create_entity('Note', type=u'x') + self.schema['ecrit_par'].rdef('Note', 'Personne').composite = 'subject' + req.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', + {'t': oe.eid, 'u': p.eid}) + e = req.create_entity('Note', type=u'z') + e.copy_relations(oe.eid) + self.assertFalse(e.ecrit_par) + self.assertTrue(oe.ecrit_par) def test_copy_with_composite(self): - user = self.user() - adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] - e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0) - self.assertEqual(e.use_email[0].address, "toto@logilab.org") - self.assertEqual(e.use_email[0].eid, adeleid) - usereid = self.execute('INSERT CWUser X: X login "toto", X upassword "toto", X in_group G ' - 'WHERE G name "users"')[0][0] - e = self.execute('Any X WHERE X eid %(x)s', {'x': usereid}).get_entity(0, 0) - e.copy_relations(user.eid) - self.assertFalse(e.use_email) - self.assertFalse(e.primary_email) + with self.admin_access.web_request() as req: + adeleid = req.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] + e = req.execute('Any X WHERE X eid %(x)s', {'x': 
req.user.eid}).get_entity(0, 0) + self.assertEqual(e.use_email[0].address, "toto@logilab.org") + self.assertEqual(e.use_email[0].eid, adeleid) + usereid = req.execute('INSERT CWUser X: X login "toto", X upassword "toto", X in_group G ' + 'WHERE G name "users"')[0][0] + e = req.execute('Any X WHERE X eid %(x)s', {'x': usereid}).get_entity(0, 0) + e.copy_relations(req.user.eid) + self.assertFalse(e.use_email) + self.assertFalse(e.primary_email) def test_copy_with_non_initial_state(self): - user = self.user() - user = self.execute('INSERT CWUser X: X login "toto", X upassword %(pwd)s, X in_group G WHERE G name "users"', - {'pwd': 'toto'}).get_entity(0, 0) - self.commit() - user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') - self.commit() - eid2 = self.execute('INSERT CWUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0] - e = self.execute('Any X WHERE X eid %(x)s', {'x': eid2}).get_entity(0, 0) - e.copy_relations(user.eid) - self.commit() - e.cw_clear_relation_cache('in_state', 'subject') - self.assertEqual(e.cw_adapt_to('IWorkflowable').state, 'activated') + with self.admin_access.web_request() as req: + user = req.execute('INSERT CWUser X: X login "toto", X upassword %(pwd)s, X in_group G WHERE G name "users"', + {'pwd': 'toto'}).get_entity(0, 0) + req.cnx.commit() + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') + req.cnx.commit() + eid2 = req.execute('INSERT CWUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0] + e = req.execute('Any X WHERE X eid %(x)s', {'x': eid2}).get_entity(0, 0) + e.copy_relations(user.eid) + req.cnx.commit() + e.cw_clear_relation_cache('in_state', 'subject') + self.assertEqual(e.cw_adapt_to('IWorkflowable').state, 'activated') def test_related_cache_both(self): - user = self.execute('Any X WHERE X eid %(x)s', {'x':self.user().eid}).get_entity(0, 0) - adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] - self.commit() - self.assertEqual(user._cw_related_cache, {}) - email = user.primary_email[0] - self.assertEqual(sorted(user._cw_related_cache), ['primary_email_subject']) - self.assertEqual(list(email._cw_related_cache), ['primary_email_object']) - groups = user.in_group - self.assertEqual(sorted(user._cw_related_cache), ['in_group_subject', 'primary_email_subject']) - for group in groups: - self.assertFalse('in_group_subject' in group._cw_related_cache, list(group._cw_related_cache)) - user.cw_clear_all_caches() - user.related('in_group', entities=True) - self.assertIn('in_group_subject', user._cw_related_cache) - user.cw_clear_all_caches() - user.related('in_group', targettypes=('CWGroup',), entities=True) - self.assertNotIn('in_group_subject', user._cw_related_cache) + with self.admin_access.web_request() as req: + user = req.execute('Any X WHERE X eid %(x)s', {'x':req.user.eid}).get_entity(0, 0) + adeleid = req.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] + req.cnx.commit() + self.assertEqual(user._cw_related_cache, {}) + email = user.primary_email[0] + self.assertEqual(sorted(user._cw_related_cache), ['primary_email_subject']) + self.assertEqual(list(email._cw_related_cache), ['primary_email_object']) + groups = user.in_group + self.assertEqual(sorted(user._cw_related_cache), ['in_group_subject', 'primary_email_subject']) + for group in groups: + self.assertNotIn('in_group_subject', group._cw_related_cache) + user.cw_clear_all_caches() + user.related('in_group', 
entities=True) + self.assertIn('in_group_subject', user._cw_related_cache) + user.cw_clear_all_caches() + user.related('in_group', targettypes=('CWGroup',), entities=True) + self.assertNotIn('in_group_subject', user._cw_related_cache) def test_related_limit(self): - req = self.request() - p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') - for tag in u'abcd': - req.create_entity('Tag', name=tag) - self.execute('SET X tags Y WHERE X is Tag, Y is Personne') - self.assertEqual(len(p.related('tags', 'object', limit=2)), 2) - self.assertEqual(len(p.related('tags', 'object')), 4) - p.cw_clear_all_caches() - self.assertEqual(len(p.related('tags', 'object', entities=True, limit=2)), 2) - self.assertEqual(len(p.related('tags', 'object', entities=True)), 4) + with self.admin_access.web_request() as req: + p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') + for tag in u'abcd': + req.create_entity('Tag', name=tag) + req.execute('SET X tags Y WHERE X is Tag, Y is Personne') + self.assertEqual(len(p.related('tags', 'object', limit=2)), 2) + self.assertEqual(len(p.related('tags', 'object')), 4) + p.cw_clear_all_caches() + self.assertEqual(len(p.related('tags', 'object', entities=True, limit=2)), 2) + self.assertEqual(len(p.related('tags', 'object', entities=True)), 4) def test_related_targettypes(self): - req = self.request() - p = req.create_entity('Personne', nom=u'Loxodonta', prenom=u'Babar') - n = req.create_entity('Note', type=u'scratch', ecrit_par=p) - t = req.create_entity('Tag', name=u'a tag', tags=(p, n)) - self.commit() - req = self.request() - t = req.entity_from_eid(t.eid) - self.assertEqual(2, t.related('tags').rowcount) - self.assertEqual(1, t.related('tags', targettypes=('Personne',)).rowcount) - self.assertEqual(1, t.related('tags', targettypes=('Note',)).rowcount) + with self.admin_access.web_request() as req: + p = req.create_entity('Personne', nom=u'Loxodonta', prenom=u'Babar') + n = req.create_entity('Note', type=u'scratch', ecrit_par=p) + t = req.create_entity('Tag', name=u'a tag', tags=(p, n)) + req.cnx.commit() + with self.admin_access.web_request() as req: + t = req.entity_from_eid(t.eid) + self.assertEqual(2, t.related('tags').rowcount) + self.assertEqual(1, t.related('tags', targettypes=('Personne',)).rowcount) + self.assertEqual(1, t.related('tags', targettypes=('Note',)).rowcount) def test_cw_instantiate_relation(self): - req = self.request() - p1 = req.create_entity('Personne', nom=u'di') - p2 = req.create_entity('Personne', nom=u'mascio') - t = req.create_entity('Tag', name=u't0', tags=[]) - self.assertCountEqual(t.tags, []) - t = req.create_entity('Tag', name=u't1', tags=p1) - self.assertCountEqual(t.tags, [p1]) - t = req.create_entity('Tag', name=u't2', tags=p1.eid) - self.assertCountEqual(t.tags, [p1]) - t = req.create_entity('Tag', name=u't3', tags=[p1, p2.eid]) - self.assertCountEqual(t.tags, [p1, p2]) + with self.admin_access.web_request() as req: + p1 = req.create_entity('Personne', nom=u'di') + p2 = req.create_entity('Personne', nom=u'mascio') + t = req.create_entity('Tag', name=u't0', tags=[]) + self.assertCountEqual(t.tags, []) + t = req.create_entity('Tag', name=u't1', tags=p1) + self.assertCountEqual(t.tags, [p1]) + t = req.create_entity('Tag', name=u't2', tags=p1.eid) + self.assertCountEqual(t.tags, [p1]) + t = req.create_entity('Tag', name=u't3', tags=[p1, p2.eid]) + self.assertCountEqual(t.tags, [p1, p2]) def test_cw_instantiate_reverse_relation(self): - req = self.request() - t1 = req.create_entity('Tag', name=u't1') - 
t2 = req.create_entity('Tag', name=u't2') - p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=t1) - self.assertCountEqual(p.reverse_tags, [t1]) - p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=t1.eid) - self.assertCountEqual(p.reverse_tags, [t1]) - p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=[t1, t2.eid]) - self.assertCountEqual(p.reverse_tags, [t1, t2]) + with self.admin_access.web_request() as req: + t1 = req.create_entity('Tag', name=u't1') + t2 = req.create_entity('Tag', name=u't2') + p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=t1) + self.assertCountEqual(p.reverse_tags, [t1]) + p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=t1.eid) + self.assertCountEqual(p.reverse_tags, [t1]) + p = req.create_entity('Personne', nom=u'di mascio', reverse_tags=[t1, t2.eid]) + self.assertCountEqual(p.reverse_tags, [t1, t2]) def test_fetch_rql(self): - user = self.user() Personne = self.vreg['etypes'].etype_class('Personne') Societe = self.vreg['etypes'].etype_class('Societe') Note = self.vreg['etypes'].etype_class('Note') @@ -216,47 +219,49 @@ cm.__enter__() torestore.append(cm) try: - # testing basic fetch_attrs attribute - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB,AC ORDERBY AA ' - 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X modification_date AC') - # testing unknown attributes - Personne.fetch_attrs = ('bloug', 'beep') - self.assertEqual(Personne.fetch_rql(user), 'Any X WHERE X is_instance_of Personne') - # testing one non final relation - Personne.fetch_attrs = ('nom', 'prenom', 'travaille') - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB,AC,AD ORDERBY AA ' - 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD') - # testing two non final relations - Personne.fetch_attrs = ('nom', 'prenom', 'travaille', 'evaluee') - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB,AC,AD,AE ORDERBY AA ' - 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD, ' - 'X evaluee AE?') - # testing one non final relation with recursion - Personne.fetch_attrs = ('nom', 'prenom', 'travaille') - Societe.fetch_attrs = ('nom', 'evaluee') - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB,AC,AD,AE,AF ORDERBY AA,AF DESC ' - 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD, ' - 'AC evaluee AE?, AE modification_date AF' - ) - # testing symmetric relation - Personne.fetch_attrs = ('nom', 'connait') - self.assertEqual(Personne.fetch_rql(user), 'Any X,AA,AB ORDERBY AA ' - 'WHERE X is_instance_of Personne, X nom AA, X connait AB?') - # testing optional relation - peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '?*' - Personne.fetch_attrs = ('nom', 'prenom', 'travaille') - Societe.fetch_attrs = ('nom',) - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB,AC,AD ORDERBY AA WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD') - # testing relation with cardinality > 1 - peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '**' - self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB ORDERBY AA WHERE X is_instance_of Personne, X nom AA, X prenom AB') - # XXX test unauthorized attribute + with self.admin_access.web_request() as req: + user = req.user + # testing basic fetch_attrs attribute + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB,AC ORDERBY AA ' + 'WHERE X is_instance_of Personne, X nom AA, X prenom 
AB, X modification_date AC') + # testing unknown attributes + Personne.fetch_attrs = ('bloug', 'beep') + self.assertEqual(Personne.fetch_rql(user), 'Any X WHERE X is_instance_of Personne') + # testing one non final relation + Personne.fetch_attrs = ('nom', 'prenom', 'travaille') + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB,AC,AD ORDERBY AA ' + 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD') + # testing two non final relations + Personne.fetch_attrs = ('nom', 'prenom', 'travaille', 'evaluee') + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB,AC,AD,AE ORDERBY AA ' + 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD, ' + 'X evaluee AE?') + # testing one non final relation with recursion + Personne.fetch_attrs = ('nom', 'prenom', 'travaille') + Societe.fetch_attrs = ('nom', 'evaluee') + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB,AC,AD,AE,AF ORDERBY AA,AF DESC ' + 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD, ' + 'AC evaluee AE?, AE modification_date AF' + ) + # testing symmetric relation + Personne.fetch_attrs = ('nom', 'connait') + self.assertEqual(Personne.fetch_rql(user), 'Any X,AA,AB ORDERBY AA ' + 'WHERE X is_instance_of Personne, X nom AA, X connait AB?') + # testing optional relation + peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '?*' + Personne.fetch_attrs = ('nom', 'prenom', 'travaille') + Societe.fetch_attrs = ('nom',) + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB,AC,AD ORDERBY AA WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD') + # testing relation with cardinality > 1 + peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '**' + self.assertEqual(Personne.fetch_rql(user), + 'Any X,AA,AB ORDERBY AA WHERE X is_instance_of Personne, X nom AA, X prenom AB') + # XXX test unauthorized attribute finally: # fetch_attrs restored by generic tearDown for cm in torestore: @@ -270,97 +275,107 @@ Personne.fetch_attrs, Personne.cw_fetch_order = fetch_config(('nom', 'type')) Note.fetch_attrs, Note.cw_fetch_order = fetch_config(('type',)) SubNote.fetch_attrs, SubNote.cw_fetch_order = fetch_config(('type',)) - p = self.request().create_entity('Personne', nom=u'pouet') - self.assertEqual(p.cw_related_rql('evaluee'), - 'Any X,AA,AB ORDERBY AA WHERE E eid %(x)s, E evaluee X, ' - 'X type AA, X modification_date AB') - n = self.request().create_entity('Note') - self.assertEqual(n.cw_related_rql('evaluee', role='object', - targettypes=('Societe', 'Personne')), - "Any X,AA ORDERBY AB DESC WHERE E eid %(x)s, X evaluee E, " - "X is IN(Personne, Societe), X nom AA, " - "X modification_date AB") - Personne.fetch_attrs, Personne.cw_fetch_order = fetch_config(('nom', )) - # XXX - self.assertEqual(p.cw_related_rql('evaluee'), - 'Any X,AA ORDERBY AA DESC ' - 'WHERE E eid %(x)s, E evaluee X, X modification_date AA') + with self.admin_access.web_request() as req: + p = req.create_entity('Personne', nom=u'pouet') + self.assertEqual(p.cw_related_rql('evaluee'), + 'Any X,AA,AB ORDERBY AA WHERE E eid %(x)s, E evaluee X, ' + 'X type AA, X modification_date AB') + n = req.create_entity('Note') + self.assertEqual(n.cw_related_rql('evaluee', role='object', + targettypes=('Societe', 'Personne')), + "Any X,AA ORDERBY AB DESC WHERE E eid %(x)s, X evaluee E, " + "X is IN(Personne, Societe), X nom AA, " + "X modification_date AB") + Personne.fetch_attrs, Personne.cw_fetch_order = 
fetch_config(('nom', )) + # XXX + self.assertEqual(p.cw_related_rql('evaluee'), + 'Any X,AA ORDERBY AA DESC ' + 'WHERE E eid %(x)s, E evaluee X, X modification_date AA') - tag = self.vreg['etypes'].etype_class('Tag')(self.request()) - self.assertEqual(tag.cw_related_rql('tags', 'subject'), - 'Any X,AA ORDERBY AA DESC ' - 'WHERE E eid %(x)s, E tags X, X modification_date AA') - self.assertEqual(tag.cw_related_rql('tags', 'subject', ('Personne',)), - 'Any X,AA,AB ORDERBY AA ' - 'WHERE E eid %(x)s, E tags X, X is Personne, X nom AA, ' - 'X modification_date AB') + tag = self.vreg['etypes'].etype_class('Tag')(req) + self.assertEqual(tag.cw_related_rql('tags', 'subject'), + 'Any X,AA ORDERBY AA DESC ' + 'WHERE E eid %(x)s, E tags X, X modification_date AA') + self.assertEqual(tag.cw_related_rql('tags', 'subject', ('Personne',)), + 'Any X,AA,AB ORDERBY AA ' + 'WHERE E eid %(x)s, E tags X, X is Personne, X nom AA, ' + 'X modification_date AB') def test_related_rql_ambiguous_cant_use_fetch_order(self): - tag = self.vreg['etypes'].etype_class('Tag')(self.request()) - for ttype in self.schema['tags'].objects(): - self.vreg['etypes'].etype_class(ttype).fetch_attrs = ('modification_date',) - self.assertEqual(tag.cw_related_rql('tags', 'subject'), - 'Any X,AA ORDERBY AA DESC ' - 'WHERE E eid %(x)s, E tags X, X modification_date AA') + with self.admin_access.web_request() as req: + tag = self.vreg['etypes'].etype_class('Tag')(req) + for ttype in self.schema['tags'].objects(): + self.vreg['etypes'].etype_class(ttype).fetch_attrs = ('modification_date',) + self.assertEqual(tag.cw_related_rql('tags', 'subject'), + 'Any X,AA ORDERBY AA DESC ' + 'WHERE E eid %(x)s, E tags X, X modification_date AA') def test_related_rql_fetch_ambiguous_rtype(self): - soc_etype = self.vreg['etypes'].etype_class('Societe') - soc = soc_etype(self.request()) - soc_etype.fetch_attrs = ('fournit',) - self.vreg['etypes'].etype_class('Service').fetch_attrs = ('fabrique_par',) - self.vreg['etypes'].etype_class('Produit').fetch_attrs = ('fabrique_par',) - self.vreg['etypes'].etype_class('Usine').fetch_attrs = ('lieu',) - self.vreg['etypes'].etype_class('Personne').fetch_attrs = ('nom',) - self.assertEqual(soc.cw_related_rql('fournit', 'subject'), - 'Any X,A WHERE E eid %(x)s, E fournit X, X fabrique_par A') + etvreg = self.vreg['etypes'] + soc_etype = etvreg.etype_class('Societe') + with self.admin_access.web_request() as req: + soc = soc_etype(req) + soc_etype.fetch_attrs = ('fournit',) + etvreg.etype_class('Service').fetch_attrs = ('fabrique_par',) + etvreg.etype_class('Produit').fetch_attrs = ('fabrique_par',) + etvreg.etype_class('Usine').fetch_attrs = ('lieu',) + etvreg.etype_class('Personne').fetch_attrs = ('nom',) + self.assertEqual(soc.cw_related_rql('fournit', 'subject'), + 'Any X,A WHERE E eid %(x)s, E fournit X, X fabrique_par A') def test_unrelated_rql_security_1_manager(self): - user = self.request().user - rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] - self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' - 'WHERE NOT A use_email O, S eid %(x)s, ' - 'O is_instance_of EmailAddress, O address AA, O alias AB, O modification_date AC') + with self.admin_access.web_request() as req: + user = req.user + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + self.assertEqual(rql, + 'Any O,AA,AB,AC ORDERBY AC DESC ' + 'WHERE NOT A use_email O, S eid %(x)s, ' + 'O is_instance_of EmailAddress, O address AA, O alias AB, ' + 'O modification_date AC') def 
test_unrelated_rql_security_1_user(self): - req = self.request() - self.create_user(req, 'toto') - self.login('toto') - user = req.user - rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] - self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' - 'WHERE NOT A use_email O, S eid %(x)s, ' - 'O is_instance_of EmailAddress, O address AA, O alias AB, O modification_date AC') - user = self.execute('Any X WHERE X login "admin"').get_entity(0, 0) - rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] - self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' - 'WHERE NOT A use_email O, S eid %(x)s, ' - 'O is EmailAddress, O address AA, O alias AB, O modification_date AC, AD eid %(AE)s, ' - 'EXISTS(S identity AD, NOT AD in_group AF, AF name "guests", AF is CWGroup), A is CWUser') + with self.admin_access.web_request() as req: + self.create_user(req, 'toto') + with self.new_access('toto').web_request() as req: + user = req.user # XXX + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + self.assertEqual(rql, + 'Any O,AA,AB,AC ORDERBY AC DESC ' + 'WHERE NOT A use_email O, S eid %(x)s, ' + 'O is_instance_of EmailAddress, O address AA, O alias AB, O modification_date AC') + user = req.execute('Any X WHERE X login "admin"').get_entity(0, 0) + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' + 'WHERE NOT A use_email O, S eid %(x)s, ' + 'O is EmailAddress, O address AA, O alias AB, O modification_date AC, AD eid %(AE)s, ' + 'EXISTS(S identity AD, NOT AD in_group AF, AF name "guests", AF is CWGroup), A is CWUser') def test_unrelated_rql_security_1_anon(self): - self.login('anon') - user = self.request().user - rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] - self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' - 'WHERE NOT A use_email O, S eid %(x)s, ' - 'O is EmailAddress, O address AA, O alias AB, O modification_date AC, AD eid %(AE)s, ' - 'EXISTS(S identity AD, NOT AD in_group AF, AF name "guests", AF is CWGroup), A is CWUser') + with self.new_access('anon').web_request() as req: + user = req.user + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' + 'WHERE NOT A use_email O, S eid %(x)s, ' + 'O is EmailAddress, O address AA, O alias AB, O modification_date AC, AD eid %(AE)s, ' + 'EXISTS(S identity AD, NOT AD in_group AF, AF name "guests", AF is CWGroup), A is CWUser') def test_unrelated_rql_security_2(self): - email = self.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) - rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] - self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' - 'WHERE NOT S use_email O, O eid %(x)s, S is_instance_of CWUser, ' - 'S login AA, S firstname AB, S surname AC, S modification_date AD') - self.login('anon') + with self.admin_access.web_request() as req: + email = req.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] + self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' + 'WHERE NOT S use_email O, O eid %(x)s, S is_instance_of CWUser, ' + 'S login AA, S firstname AB, S surname AC, S modification_date AD') + req.cnx.commit() rperms = self.schema['EmailAddress'].permissions['read'] clear_cache(self.schema['EmailAddress'], 'get_groups') clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') 
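# Editor's aside (hedged sketch, not part of this changeset): the
# clear_cache()/try/finally dance around the EmailAddress read permissions,
# repeated in several tests here, is the manual form of the
# temporary_permissions() helper used elsewhere in this file.  The pattern,
# assuming a `schema` object in scope:

from logilab.common.decorators import clear_cache

eschema = schema['EmailAddress']
saved = eschema.permissions['read']
clear_cache(eschema, 'get_groups')    # permission lookups are cached ...
clear_cache(eschema, 'get_rqlexprs')  # ... so stale entries must be dropped
eschema.permissions['read'] = ('managers', 'users', 'guests')
try:
    pass  # exercise the code that depends on the relaxed permissions
finally:
    clear_cache(eschema, 'get_groups')
    clear_cache(eschema, 'get_rqlexprs')
    eschema.permissions['read'] = saved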
self.schema['EmailAddress'].permissions['read'] = ('managers', 'users', 'guests',) try: - email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) - rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] - self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' + with self.new_access('anon').web_request() as req: + email = req.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] + self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' 'WHERE NOT S use_email O, O eid %(x)s, S is CWUser, ' 'S login AA, S firstname AB, S surname AC, S modification_date AD, ' 'AE eid %(AF)s, EXISTS(S identity AE, NOT AE in_group AG, AG name "guests", AG is CWGroup)') @@ -369,27 +384,36 @@ clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') self.schema['EmailAddress'].permissions['read'] = rperms + def test_cw_linkable_rql(self): + with self.admin_access.web_request() as req: + email = req.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) + rql = email.cw_linkable_rql('use_email', 'CWUser', 'object')[0] + self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' + 'WHERE O eid %(x)s, S is_instance_of CWUser, ' + 'S login AA, S firstname AB, S surname AC, S modification_date AD') def test_unrelated_rql_security_nonexistant(self): - self.login('anon') - email = self.vreg['etypes'].etype_class('EmailAddress')(self.request()) - rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] - self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' + with self.new_access('anon').web_request() as req: + email = self.vreg['etypes'].etype_class('EmailAddress')(req) + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] + self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' 'WHERE S is CWUser, ' 'S login AA, S firstname AB, S surname AC, S modification_date AD, ' 'AE eid %(AF)s, EXISTS(S identity AE, NOT AE in_group AG, AG name "guests", AG is CWGroup)') def test_unrelated_rql_constraints_creation_subject(self): - person = self.vreg['etypes'].etype_class('Personne')(self.request()) - rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] - self.assertEqual( + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] + self.assertEqual( rql, 'Any O,AA,AB,AC ORDERBY AC DESC WHERE ' 'O is_instance_of Personne, O nom AA, O prenom AB, O modification_date AC') def test_unrelated_rql_constraints_creation_object(self): - person = self.vreg['etypes'].etype_class('Personne')(self.request()) - rql = person.cw_unrelated_rql('connait', 'Personne', 'object')[0] - self.assertEqual( + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + rql = person.cw_unrelated_rql('connait', 'Personne', 'object')[0] + self.assertEqual( rql, 'Any S,AA,AB,AC ORDERBY AC DESC WHERE ' 'S is Personne, S nom AA, S prenom AB, S modification_date AC, ' 'NOT (S connait AD, AD nom "toto"), AD is Personne, ' @@ -401,25 +425,28 @@ rdef = self.schema['Personne'].rdef('connait') perm_rrqle = RRQLExpression('U has_update_permission S') with self.temporary_permissions((rdef, {'add': (perm_rrqle,)})): - person = self.vreg['etypes'].etype_class('Personne')(self.request()) - rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] - self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC WHERE ' + with self.admin_access.web_request() as 
req: + person = self.vreg['etypes'].etype_class('Personne')(req) + rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] + self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC WHERE ' 'O is_instance_of Personne, O nom AA, O prenom AB, ' 'O modification_date AC') def test_unrelated_rql_constraints_edition_subject(self): - person = self.request().create_entity('Personne', nom=u'sylvain') - rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] - self.assertEqual( - rql, 'Any O,AA,AB,AC ORDERBY AC DESC WHERE ' + with self.admin_access.web_request() as req: + person = req.create_entity('Personne', nom=u'sylvain') + rql = person.cw_unrelated_rql('connait', 'Personne', 'subject')[0] + self.assertEqual( + rql, 'Any O,AA,AB,AC ORDERBY AC DESC WHERE ' 'NOT S connait O, S eid %(x)s, O is Personne, ' 'O nom AA, O prenom AB, O modification_date AC, ' 'NOT S identity O') def test_unrelated_rql_constraints_edition_object(self): - person = self.request().create_entity('Personne', nom=u'sylvain') - rql = person.cw_unrelated_rql('connait', 'Personne', 'object')[0] - self.assertEqual( + with self.admin_access.web_request() as req: + person = req.create_entity('Personne', nom=u'sylvain') + rql = person.cw_unrelated_rql('connait', 'Personne', 'object')[0] + self.assertEqual( rql, 'Any S,AA,AB,AC ORDERBY AC DESC WHERE ' 'NOT S connait O, O eid %(x)s, S is Personne, ' 'S nom AA, S prenom AB, S modification_date AC, ' @@ -427,95 +454,95 @@ 'EXISTS(S travaille AE, AE nom "tutu")') def test_unrelated_rql_s_linkto_s(self): - req = self.request() - person = self.vreg['etypes'].etype_class('Personne')(req) - self.vreg['etypes'].etype_class('Personne').fetch_attrs = () - soc = req.create_entity('Societe', nom=u'logilab') - lt_infos = {('actionnaire', 'subject'): [soc.eid]} - rql, args = person.cw_unrelated_rql('associe', 'Personne', 'subject', + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + self.vreg['etypes'].etype_class('Personne').fetch_attrs = () + soc = req.create_entity('Societe', nom=u'logilab') + lt_infos = {('actionnaire', 'subject'): [soc.eid]} + rql, args = person.cw_unrelated_rql('associe', 'Personne', 'subject', lt_infos=lt_infos) - self.assertEqual(u'Any O ORDERBY O WHERE O is Personne, ' - u'EXISTS(AA eid %(SOC)s, O actionnaire AA)', rql) - self.assertEqual({'SOC': soc.eid}, args) + self.assertEqual(u'Any O ORDERBY O WHERE O is Personne, ' + u'EXISTS(AA eid %(SOC)s, O actionnaire AA)', rql) + self.assertEqual({'SOC': soc.eid}, args) def test_unrelated_rql_s_linkto_o(self): - req = self.request() - person = self.vreg['etypes'].etype_class('Personne')(req) - self.vreg['etypes'].etype_class('Societe').fetch_attrs = () - soc = req.create_entity('Societe', nom=u'logilab') - lt_infos = {('contrat_exclusif', 'object'): [soc.eid]} - rql, args = person.cw_unrelated_rql('actionnaire', 'Societe', 'subject', - lt_infos=lt_infos) - self.assertEqual(u'Any O ORDERBY O WHERE NOT A actionnaire O, ' - u'O is_instance_of Societe, NOT EXISTS(O eid %(O)s), ' - u'A is Personne', rql) - self.assertEqual({'O': soc.eid}, args) + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + self.vreg['etypes'].etype_class('Societe').fetch_attrs = () + soc = req.create_entity('Societe', nom=u'logilab') + lt_infos = {('contrat_exclusif', 'object'): [soc.eid]} + rql, args = person.cw_unrelated_rql('actionnaire', 'Societe', 'subject', + lt_infos=lt_infos) + self.assertEqual(u'Any O ORDERBY O WHERE NOT A 
actionnaire O, ' + u'O is_instance_of Societe, NOT EXISTS(O eid %(O)s), ' + u'A is Personne', rql) + self.assertEqual({'O': soc.eid}, args) def test_unrelated_rql_o_linkto_s(self): - req = self.request() - soc = self.vreg['etypes'].etype_class('Societe')(req) - self.vreg['etypes'].etype_class('Personne').fetch_attrs = () - person = req.create_entity('Personne', nom=u'florent') - lt_infos = {('contrat_exclusif', 'subject'): [person.eid]} - rql, args = soc.cw_unrelated_rql('actionnaire', 'Personne', 'object', - lt_infos=lt_infos) - self.assertEqual(u'Any S ORDERBY S WHERE NOT S actionnaire A, ' - u'S is_instance_of Personne, NOT EXISTS(S eid %(S)s), ' - u'A is Societe', rql) - self.assertEqual({'S': person.eid}, args) + with self.admin_access.web_request() as req: + soc = self.vreg['etypes'].etype_class('Societe')(req) + self.vreg['etypes'].etype_class('Personne').fetch_attrs = () + person = req.create_entity('Personne', nom=u'florent') + lt_infos = {('contrat_exclusif', 'subject'): [person.eid]} + rql, args = soc.cw_unrelated_rql('actionnaire', 'Personne', 'object', + lt_infos=lt_infos) + self.assertEqual(u'Any S ORDERBY S WHERE NOT S actionnaire A, ' + u'S is_instance_of Personne, NOT EXISTS(S eid %(S)s), ' + u'A is Societe', rql) + self.assertEqual({'S': person.eid}, args) def test_unrelated_rql_o_linkto_o(self): - req = self.request() - soc = self.vreg['etypes'].etype_class('Societe')(req) - self.vreg['etypes'].etype_class('Personne').fetch_attrs = () - person = req.create_entity('Personne', nom=u'florent') - lt_infos = {('actionnaire', 'object'): [person.eid]} - rql, args = soc.cw_unrelated_rql('dirige', 'Personne', 'object', - lt_infos=lt_infos) - self.assertEqual(u'Any S ORDERBY S WHERE NOT S dirige A, ' - u'S is_instance_of Personne, EXISTS(S eid %(S)s), ' - u'A is Societe', rql) - self.assertEqual({'S': person.eid}, args) + with self.admin_access.web_request() as req: + soc = self.vreg['etypes'].etype_class('Societe')(req) + self.vreg['etypes'].etype_class('Personne').fetch_attrs = () + person = req.create_entity('Personne', nom=u'florent') + lt_infos = {('actionnaire', 'object'): [person.eid]} + rql, args = soc.cw_unrelated_rql('dirige', 'Personne', 'object', + lt_infos=lt_infos) + self.assertEqual(u'Any S ORDERBY S WHERE NOT S dirige A, ' + u'S is_instance_of Personne, EXISTS(S eid %(S)s), ' + u'A is Societe', rql) + self.assertEqual({'S': person.eid}, args) def test_unrelated_rql_s_linkto_s_no_info(self): - req = self.request() - person = self.vreg['etypes'].etype_class('Personne')(req) - self.vreg['etypes'].etype_class('Personne').fetch_attrs = () - soc = req.create_entity('Societe', nom=u'logilab') - rql, args = person.cw_unrelated_rql('associe', 'Personne', 'subject') - self.assertEqual(u'Any O ORDERBY O WHERE O is_instance_of Personne', rql) - self.assertEqual({}, args) + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + self.vreg['etypes'].etype_class('Personne').fetch_attrs = () + soc = req.create_entity('Societe', nom=u'logilab') + rql, args = person.cw_unrelated_rql('associe', 'Personne', 'subject') + self.assertEqual(u'Any O ORDERBY O WHERE O is_instance_of Personne', rql) + self.assertEqual({}, args) def test_unrelated_rql_s_linkto_s_unused_info(self): - req = self.request() - person = self.vreg['etypes'].etype_class('Personne')(req) - self.vreg['etypes'].etype_class('Personne').fetch_attrs = () - other_p = req.create_entity('Personne', nom=u'titi') - lt_infos = {('dirige', 'subject'): [other_p.eid]} - rql, args = 
person.cw_unrelated_rql('associe', 'Personne', 'subject', - lt_infos=lt_infos) - self.assertEqual(u'Any O ORDERBY O WHERE O is_instance_of Personne', rql) + with self.admin_access.web_request() as req: + person = self.vreg['etypes'].etype_class('Personne')(req) + self.vreg['etypes'].etype_class('Personne').fetch_attrs = () + other_p = req.create_entity('Personne', nom=u'titi') + lt_infos = {('dirige', 'subject'): [other_p.eid]} + rql, args = person.cw_unrelated_rql('associe', 'Personne', 'subject', + lt_infos=lt_infos) + self.assertEqual(u'Any O ORDERBY O WHERE O is_instance_of Personne', rql) def test_unrelated_base(self): - req = self.request() - p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') - e = req.create_entity('Tag', name=u'x') - related = [r.eid for r in e.tags] - self.assertEqual(related, []) - unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] - self.assertTrue(p.eid in unrelated) - self.execute('SET X tags Y WHERE X is Tag, Y is Personne') - e = self.execute('Any X WHERE X is Tag').get_entity(0, 0) - unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] - self.assertFalse(p.eid in unrelated) + with self.admin_access.web_request() as req: + p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') + e = req.create_entity('Tag', name=u'x') + related = [r.eid for r in e.tags] + self.assertEqual(related, []) + unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] + self.assertIn(p.eid, unrelated) + req.execute('SET X tags Y WHERE X is Tag, Y is Personne') + e = req.execute('Any X WHERE X is Tag').get_entity(0, 0) + unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] + self.assertNotIn(p.eid, unrelated) def test_unrelated_limit(self): - req = self.request() - e = req.create_entity('Tag', name=u'x') - req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') - req.create_entity('Personne', nom=u'thenault', prenom=u'sylvain') - self.assertEqual(len(e.unrelated('tags', 'Personne', 'subject', limit=1)), - 1) + with self.admin_access.web_request() as req: + e = req.create_entity('Tag', name=u'x') + req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') + req.create_entity('Personne', nom=u'thenault', prenom=u'sylvain') + self.assertEqual(len(e.unrelated('tags', 'Personne', 'subject', limit=1)), + 1) def test_unrelated_security(self): rperms = self.schema['EmailAddress'].permissions['read'] @@ -523,206 +550,215 @@ clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') self.schema['EmailAddress'].permissions['read'] = ('managers', 'users', 'guests',) try: - email = self.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) - rset = email.unrelated('use_email', 'CWUser', 'object') - self.assertEqual([x.login for x in rset.entities()], [u'admin', u'anon']) - user = self.request().user - rset = user.unrelated('use_email', 'EmailAddress', 'subject') - self.assertEqual([x.address for x in rset.entities()], [u'hop']) - req = self.request() - self.create_user(req, 'toto') - self.login('toto') - email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) - rset = email.unrelated('use_email', 'CWUser', 'object') - self.assertEqual([x.login for x in rset.entities()], ['toto']) - user = self.request().user - rset = user.unrelated('use_email', 'EmailAddress', 'subject') - self.assertEqual([x.address for x in rset.entities()], ['hop']) - user = self.execute('Any X WHERE X login "admin"').get_entity(0, 0) - rset = user.unrelated('use_email', 
'EmailAddress', 'subject') - self.assertEqual([x.address for x in rset.entities()], []) - self.login('anon') - email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) - rset = email.unrelated('use_email', 'CWUser', 'object') - self.assertEqual([x.login for x in rset.entities()], []) - user = self.request().user - rset = user.unrelated('use_email', 'EmailAddress', 'subject') - self.assertEqual([x.address for x in rset.entities()], []) + with self.admin_access.web_request() as req: + email = req.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) + rset = email.unrelated('use_email', 'CWUser', 'object') + self.assertEqual([x.login for x in rset.entities()], [u'admin', u'anon']) + user = req.user + rset = user.unrelated('use_email', 'EmailAddress', 'subject') + self.assertEqual([x.address for x in rset.entities()], [u'hop']) + self.create_user(req, 'toto') + with self.new_access('toto').web_request() as req: + email = req.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) + rset = email.unrelated('use_email', 'CWUser', 'object') + self.assertEqual([x.login for x in rset.entities()], ['toto']) + user = req.user + rset = user.unrelated('use_email', 'EmailAddress', 'subject') + self.assertEqual([x.address for x in rset.entities()], ['hop']) + user = req.execute('Any X WHERE X login "admin"').get_entity(0, 0) + rset = user.unrelated('use_email', 'EmailAddress', 'subject') + self.assertEqual([x.address for x in rset.entities()], []) + with self.new_access('anon').web_request() as req: + email = req.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) + rset = email.unrelated('use_email', 'CWUser', 'object') + self.assertEqual([x.login for x in rset.entities()], []) + user = req.user + rset = user.unrelated('use_email', 'EmailAddress', 'subject') + self.assertEqual([x.address for x in rset.entities()], []) finally: clear_cache(self.schema['EmailAddress'], 'get_groups') clear_cache(self.schema['EmailAddress'], 'get_rqlexprs') self.schema['EmailAddress'].permissions['read'] = rperms def test_unrelated_new_entity(self): - e = self.vreg['etypes'].etype_class('CWUser')(self.request()) - unrelated = [r[0] for r in e.unrelated('in_group', 'CWGroup', 'subject')] - # should be default groups but owners, i.e. managers, users, guests - self.assertEqual(len(unrelated), 3) + with self.admin_access.web_request() as req: + e = self.vreg['etypes'].etype_class('CWUser')(req) + unrelated = [r[0] for r in e.unrelated('in_group', 'CWGroup', 'subject')] + # should be default groups but owners, i.e. managers, users, guests + self.assertEqual(len(unrelated), 3) def test_printable_value_string(self): - e = self.request().create_entity('Card', title=u'rest test', content=u'du :eid:`1:*ReST*`', - content_format=u'text/rest') - self.assertEqual(e.printable_value('content'), - '

du *ReST*

') - e.cw_attr_cache['content'] = 'du html users' - e.cw_attr_cache['content_format'] = 'text/html' - self.assertEqual(e.printable_value('content'), - 'du html users') - e.cw_attr_cache['content'] = 'du *texte*' - e.cw_attr_cache['content_format'] = 'text/plain' - self.assertEqual(e.printable_value('content'), - '

\ndu *texte*

') - e.cw_attr_cache['title'] = 'zou' - e.cw_attr_cache['content'] = '''\ + with self.admin_access.web_request() as req: + e = req.create_entity('Card', title=u'rest test', + content=u'du :eid:`1:*ReST*`', + content_format=u'text/rest') + self.assertEqual(e.printable_value('content'), + '

du *ReST*

') + e.cw_attr_cache['content'] = 'du html users' + e.cw_attr_cache['content_format'] = 'text/html' + self.assertEqual(e.printable_value('content'), + 'du html users') + e.cw_attr_cache['content'] = 'du *texte*' + e.cw_attr_cache['content_format'] = 'text/plain' + self.assertEqual(e.printable_value('content'), + '

\ndu *texte*

') + e.cw_attr_cache['title'] = 'zou' + e.cw_attr_cache['content'] = '''\ a title ======= du :eid:`1:*ReST*`''' - e.cw_attr_cache['content_format'] = 'text/rest' - self.assertEqual(e.printable_value('content', format='text/plain'), - e.cw_attr_cache['content']) + e.cw_attr_cache['content_format'] = 'text/rest' + self.assertEqual(e.printable_value('content', format='text/plain'), + e.cw_attr_cache['content']) - e.cw_attr_cache['content'] = u'yo (zou éà ;)' - e.cw_attr_cache['content_format'] = 'text/html' - self.assertEqual(e.printable_value('content', format='text/plain').strip(), - u'**yo (zou éà ;)**') - if HAS_TAL: - e.cw_attr_cache['content'] = '

titre

' - e.cw_attr_cache['content_format'] = 'text/cubicweb-page-template' - self.assertEqual(e.printable_value('content'), - '

zou

') + e.cw_attr_cache['content'] = u'yo (zou éà ;)' + e.cw_attr_cache['content_format'] = 'text/html' + self.assertEqual(e.printable_value('content', format='text/plain').strip(), + u'**yo (zou éà ;)**') + if HAS_TAL: + e.cw_attr_cache['content'] = '

titre

' + e.cw_attr_cache['content_format'] = 'text/cubicweb-page-template' + self.assertEqual(e.printable_value('content'), + '

zou


     def test_printable_value_bytes(self):
-        req = self.request()
-        e = req.create_entity('File', data=Binary('lambda x: 1'), data_format=u'text/x-python',
-                              data_encoding=u'ascii', data_name=u'toto.py')
-        from cubicweb import mttransforms
-        if mttransforms.HAS_PYGMENTS_TRANSFORMS:
-            import pygments
-            if tuple(int(i) for i in pygments.__version__.split('.')[:2]) >= (1, 3):
-                self.assertEqual(e.printable_value('data'),
-                                 '''lambda x: 1
+        with self.admin_access.web_request() as req:
+            e = req.create_entity('File', data=Binary('lambda x: 1'), data_format=u'text/x-python',
+                                  data_encoding=u'ascii', data_name=u'toto.py')
+            from cubicweb import mttransforms
+            if mttransforms.HAS_PYGMENTS_TRANSFORMS:
+                import pygments
+                if tuple(int(i) for i in pygments.__version__.split('.')[:2]) >= (1, 3):
+                    self.assertEqual(e.printable_value('data'),
+                                      '''lambda x: 1
+
 ''')
+                else:
+                    self.assertEqual(e.printable_value('data'),
+                                      '''lambda x: 1
 
 ''')
             else:
                 self.assertEqual(e.printable_value('data'),
-                                 '''lambda x: 1
-
-''')
-        else:
-            self.assertEqual(e.printable_value('data'),
-                             '''
-lambda x: 1
+                                  '''
+    lambda x: 1
 
 ''')
-        e = req.create_entity('File', data=Binary('*héhéhé*'), data_format=u'text/rest',
-                              data_encoding=u'utf-8', data_name=u'toto.txt')
-        self.assertEqual(e.printable_value('data'),
-                         u'héhéhé')
+            e = req.create_entity('File', data=Binary('*héhéhé*'), data_format=u'text/rest',
+                                  data_encoding=u'utf-8', data_name=u'toto.txt')
+            self.assertEqual(e.printable_value('data'),
+                             u'héhéhé')

     def test_printable_value_bad_html(self):
         """make sure we don't crash if we try to render invalid XHTML strings"""
-        req = self.request()
-        e = req.create_entity('Card', title=u'bad html', content=u'R&D',
-                              content_format=u'text/html')
-        tidy = lambda x: x.replace('\n', '')
-        self.assertEqual(tidy(e.printable_value('content')),
-                         'R&D')
-        e.cw_attr_cache['content'] = u'yo !! R&D pas fermé'
-        self.assertEqual(tidy(e.printable_value('content')),
-                         u'yo !! R&D pas fermé')
-        e.cw_attr_cache['content'] = u'R&D'
-        self.assertEqual(tidy(e.printable_value('content')), u'R&D')
-        e.cw_attr_cache['content'] = u'R&D;'
-        self.assertEqual(tidy(e.printable_value('content')), u'R&D;')
-        e.cw_attr_cache['content'] = u'yo !! R&D pas fermé'
-        self.assertEqual(tidy(e.printable_value('content')),
-                         u'yo !! R&D pas fermé')
-        e.cw_attr_cache['content'] = u'été été'
-        self.assertEqual(tidy(e.printable_value('content')),
-                         u'été été')
-        e.cw_attr_cache['content'] = u"C'est un exemple sérieux"
-        self.assertEqual(tidy(e.printable_value('content')),
-                         u"C'est un exemple sérieux")
-        # make sure valid xhtml is left untouched
-        e.cw_attr_cache['content'] = u'R&D'
-        self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content'])
-        e.cw_attr_cache['content'] = u'été'
-        self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content'])
-        e.cw_attr_cache['content'] = u'été'
-        self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content'])
-        e.cw_attr_cache['content'] = u'hop\r\nhop\nhip\rmomo'
-        self.assertEqual(e.printable_value('content'), u'hop\nhop\nhip\nmomo')
+        with self.admin_access.web_request() as req:
+            e = req.create_entity('Card', title=u'bad html', content=u'R&D',
+                                  content_format=u'text/html')
+            tidy = lambda x: x.replace('\n', '')
+            self.assertEqual(tidy(e.printable_value('content')),
+                             'R&D')
+            e.cw_attr_cache['content'] = u'yo !! R&D pas fermé'
+            self.assertEqual(tidy(e.printable_value('content')),
+                             u'yo !! R&D pas fermé')
+            e.cw_attr_cache['content'] = u'R&D'
+            self.assertEqual(tidy(e.printable_value('content')), u'R&D')
+            e.cw_attr_cache['content'] = u'R&D;'
+            self.assertEqual(tidy(e.printable_value('content')), u'R&D;')
+            e.cw_attr_cache['content'] = u'yo !! R&D pas fermé'
+            self.assertEqual(tidy(e.printable_value('content')),
+                             u'yo !! R&D pas fermé')
+            e.cw_attr_cache['content'] = u'été été'
+            self.assertEqual(tidy(e.printable_value('content')),
+                             u'été été')
+            e.cw_attr_cache['content'] = u"C'est un exemple sérieux"
+            self.assertEqual(tidy(e.printable_value('content')),
+                             u"C'est un exemple sérieux")
+            # make sure valid xhtml is left untouched
+            e.cw_attr_cache['content'] = u'R&D'
+            self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content'])
+            e.cw_attr_cache['content'] = u'été'
+            self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content'])
+            e.cw_attr_cache['content'] = u'été'
+            self.assertEqual(e.printable_value('content'), e.cw_attr_cache['content'])
+            e.cw_attr_cache['content'] = u'hop\r\nhop\nhip\rmomo'
+            self.assertEqual(e.printable_value('content'), u'hop\nhop\nhip\nmomo')

     def test_printable_value_bad_html_ms(self):
-        req = self.request()
-        e = req.create_entity('Card', title=u'bad html', content=u'R&D',
-                              content_format=u'text/html')
-        tidy = lambda x: x.replace('\n', '')
-        e.cw_attr_cache['content'] = u'ms orifice produces weird html'
-        # Caution! current implementation of soup2xhtml strips first div element
-        content = soup2xhtml(e.printable_value('content'), 'utf-8')
-        self.assertMultiLineEqual(content, u'ms orifice produces weird html')
+        with self.admin_access.web_request() as req:
+            e = req.create_entity('Card', title=u'bad html', content=u'R&D',
+                                  content_format=u'text/html')
+            tidy = lambda x: x.replace('\n', '')
+            e.cw_attr_cache['content'] = u'ms orifice produces weird html'
+            # Caution! current implementation of soup2xhtml strips first div element
+            content = soup2xhtml(e.printable_value('content'), 'utf-8')
+            self.assertMultiLineEqual(content, u'ms orifice produces weird html')

     def test_fulltextindex(self):
-        e = self.vreg['etypes'].etype_class('File')(self.request())
-        e.cw_attr_cache['description'] = 'du html'
-        e.cw_attr_cache['description_format'] = 'text/html'
-        e.cw_attr_cache['data'] = Binary('some data')
-        e.cw_attr_cache['data_name'] = 'an html file'
-        e.cw_attr_cache['data_format'] = 'text/html'
-        e.cw_attr_cache['data_encoding'] = 'ascii'
-        e._cw.transaction_data = {} # XXX req should be a session
-        words = e.cw_adapt_to('IFTIndexable').get_words()
-        words['C'].sort()
-        self.assertEqual({'C': sorted(['an', 'html', 'file', 'du', 'html', 'some', 'data'])},
-                         words)
+        with self.admin_access.web_request() as req:
+            e = self.vreg['etypes'].etype_class('File')(req)
+            e.cw_attr_cache['description'] = 'du html'
+            e.cw_attr_cache['description_format'] = 'text/html'
+            e.cw_attr_cache['data'] = Binary('some data')
+            e.cw_attr_cache['data_name'] = 'an html file'
+            e.cw_attr_cache['data_format'] = 'text/html'
+            e.cw_attr_cache['data_encoding'] = 'ascii'
+            e._cw.transaction_data.clear()
+            words = e.cw_adapt_to('IFTIndexable').get_words()
+            words['C'].sort()
+            self.assertEqual({'C': sorted(['an', 'html', 'file', 'du', 'html', 'some', 'data'])},
+                             words)

     def test_nonregr_relation_cache(self):
-        req = self.request()
-        p1 = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien')
-        p2 = req.create_entity('Personne', nom=u'toto')
-        self.execute('SET X evaluee Y WHERE X nom "di mascio", Y nom "toto"')
-        self.assertEqual(p1.evaluee[0].nom, "toto")
-        self.assertTrue(not p1.reverse_evaluee)
+        with self.admin_access.web_request() as req:
+            p1 = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien')
+            p2 = req.create_entity('Personne', nom=u'toto')
+            req.execute('SET X evaluee Y WHERE X nom "di mascio", Y nom "toto"')
+            self.assertEqual(p1.evaluee[0].nom, "toto")
+            self.assertFalse(p1.reverse_evaluee)

     def test_complete_relation(self):
-        session = self.session
-        eid = session.execute(
-            'INSERT TrInfo X: X comment "zou", X wf_info_for U, X from_state S1, X to_state S2 '
-            'WHERE U login "admin", S1 name "activated", S2 name "deactivated"')[0][0]
-        trinfo = self.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0)
-        trinfo.complete()
-        self.assertTrue(isinstance(trinfo.cw_attr_cache['creation_date'], datetime))
-        self.assertTrue(trinfo.cw_relation_cached('from_state', 'subject'))
-        self.assertTrue(trinfo.cw_relation_cached('to_state', 'subject'))
-        self.assertTrue(trinfo.cw_relation_cached('wf_info_for', 'subject'))
-        self.assertEqual(trinfo.by_transition, ())
+        with self.admin_access.repo_cnx() as cnx:
+            eid = cnx.execute(
+                'INSERT TrInfo X: X comment "zou", X wf_info_for U, X from_state S1, X to_state S2 '
+                'WHERE U login "admin", S1 name "activated", S2 name "deactivated"')[0][0]
+            trinfo = cnx.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0)
+            trinfo.complete()
+            self.assertIsInstance(trinfo.cw_attr_cache['creation_date'], datetime)
+            self.assertTrue(trinfo.cw_relation_cached('from_state', 'subject'))
+            self.assertTrue(trinfo.cw_relation_cached('to_state', 'subject'))
+            self.assertTrue(trinfo.cw_relation_cached('wf_info_for', 'subject'))
+            self.assertEqual(trinfo.by_transition, ())

     def test_request_cache(self):
-        req = self.request()
-        user = self.execute('CWUser X WHERE X login "admin"', req=req).get_entity(0, 0)
-        state = user.in_state[0]
-        samestate = self.execute('State X WHERE X name "activated"', req=req).get_entity(0, 0)
-        self.assertTrue(state is samestate)
+        with self.admin_access.web_request() as req:
+            user = req.execute('CWUser X WHERE X login "admin"').get_entity(0, 0)
+            state = user.in_state[0]
+            samestate = req.execute('State X WHERE X name "activated"').get_entity(0, 0)
+            self.assertIs(state, samestate)

     def test_rest_path(self):
-        req = self.request()
-        note = req.create_entity('Note', type=u'z')
-        self.assertEqual(note.rest_path(), 'note/%s' % note.eid)
-        # unique attr
-        tag = req.create_entity('Tag', name=u'x')
-        self.assertEqual(tag.rest_path(), 'tag/x')
-        # test explicit rest_attr
-        person = req.create_entity('Personne', prenom=u'john', nom=u'doe')
-        self.assertEqual(person.rest_path(), 'personne/doe')
-        # ambiguity test
-        person2 = req.create_entity('Personne', prenom=u'remi', nom=u'doe')
-        person.cw_clear_all_caches()
-        self.assertEqual(person.rest_path(), unicode(person.eid))
-        self.assertEqual(person2.rest_path(), unicode(person2.eid))
-        # unique attr with None value (nom in this case)
-        friend = req.create_entity('Ami', prenom=u'bob')
-        self.assertEqual(friend.rest_path(), unicode(friend.eid))
+        with self.admin_access.web_request() as req:
+            note = req.create_entity('Note', type=u'z')
+            self.assertEqual(note.rest_path(), 'note/%s' % note.eid)
+            # unique attr
+            tag = req.create_entity('Tag', name=u'x')
+            self.assertEqual(tag.rest_path(), 'tag/x')
+            # test explicit rest_attr
+            person = req.create_entity('Personne', prenom=u'john', nom=u'doe')
+            self.assertEqual(person.rest_path(), 'personne/doe')
+            # ambiguity test
+            person2 = req.create_entity('Personne', prenom=u'remi', nom=u'doe')
+            person.cw_clear_all_caches()
+            self.assertEqual(person.rest_path(), unicode(person.eid))
+            self.assertEqual(person2.rest_path(), unicode(person2.eid))
+            # unique attr with None value (nom in this case)
+            friend = req.create_entity('Ami', prenom=u'bob')
+            self.assertEqual(friend.rest_path(), unicode(friend.eid))
+            # 'ref' below is created without the unique but not required
+            # attribute, make sure that the unique _and_ required 'ean' is used
+            # as the rest attribute
+            ref = req.create_entity('Reference', ean=u'42-1337-42')
+            self.assertEqual(ref.rest_path(), 'reference/42-1337-42')

     def test_can_use_rest_path(self):
         self.assertTrue(can_use_rest_path(u'zobi'))
@@ -732,66 +768,66 @@
         self.assertFalse(can_use_rest_path(u'zo?bi'))

     def test_cw_set_attributes(self):
-        req = self.request()
-        person = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien')
-        self.assertEqual(person.prenom, u'adrien')
-        self.assertEqual(person.nom, u'di mascio')
-        person.cw_set(prenom=u'sylvain', nom=u'thénault')
-        person = self.execute('Personne P').get_entity(0, 0) # XXX retrieval needed?
-        self.assertEqual(person.prenom, u'sylvain')
-        self.assertEqual(person.nom, u'thénault')
+        with self.admin_access.web_request() as req:
+            person = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien')
+            self.assertEqual(person.prenom, u'adrien')
+            self.assertEqual(person.nom, u'di mascio')
+            person.cw_set(prenom=u'sylvain', nom=u'thénault')
+            person = req.execute('Personne P').get_entity(0, 0) # XXX retrieval needed?
+            self.assertEqual(person.prenom, u'sylvain')
+            self.assertEqual(person.nom, u'thénault')

     def test_cw_set_relations(self):
-        req = self.request()
-        person = req.create_entity('Personne', nom=u'chauvat', prenom=u'nicolas')
-        note = req.create_entity('Note', type=u'x')
-        note.cw_set(ecrit_par=person)
-        note = req.create_entity('Note', type=u'y')
-        note.cw_set(ecrit_par=person.eid)
-        self.assertEqual(len(person.reverse_ecrit_par), 2)
+        with self.admin_access.web_request() as req:
+            person = req.create_entity('Personne', nom=u'chauvat', prenom=u'nicolas')
+            note = req.create_entity('Note', type=u'x')
+            note.cw_set(ecrit_par=person)
+            note = req.create_entity('Note', type=u'y')
+            note.cw_set(ecrit_par=person.eid)
+            self.assertEqual(len(person.reverse_ecrit_par), 2)

     def test_metainformation_and_external_absolute_url(self):
-        req = self.request()
-        note = req.create_entity('Note', type=u'z')
-        metainf = note.cw_metainformation()
-        self.assertEqual(metainf, {'source': {'type': 'native', 'uri': 'system',
-                                              'use-cwuri-as-url': False},
-                                   'type': u'Note', 'extid': None})
-        self.assertEqual(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid)
-        metainf['source'] = metainf['source'].copy()
-        metainf['source']['base-url'] = 'http://cubicweb2.com/'
-        metainf['extid'] = 1234
-        self.assertEqual(note.absolute_url(), 'http://cubicweb2.com/note/1234')
+        with self.admin_access.web_request() as req:
+            note = req.create_entity('Note', type=u'z')
+            metainf = note.cw_metainformation()
+            self.assertEqual(metainf, {'source': {'type': 'native', 'uri': 'system',
+                                                  'use-cwuri-as-url': False},
+                                       'type': u'Note', 'extid': None})
+            self.assertEqual(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid)
+            metainf['source'] = metainf['source'].copy()
+            metainf['source']['base-url'] = 'http://cubicweb2.com/'
+            metainf['extid'] = 1234
+            self.assertEqual(note.absolute_url(), 'http://cubicweb2.com/note/1234')

     def test_absolute_url_empty_field(self):
-        req = self.request()
-        card = req.create_entity('Card', wikiid=u'', title=u'test')
-        self.assertEqual(card.absolute_url(),
-                         'http://testing.fr/cubicweb/%s' % card.eid)
+        with self.admin_access.web_request() as req:
+            card = req.create_entity('Card', wikiid=u'', title=u'test')
+            self.assertEqual(card.absolute_url(),
+                             'http://testing.fr/cubicweb/%s' % card.eid)

     def test_create_and_compare_entity(self):
-        req = self.request()
-        p1 = req.create_entity('Personne', nom=u'fayolle', prenom=u'alexandre')
-        p2 = req.create_entity('Personne', nom=u'campeas', prenom=u'aurelien')
-        note = req.create_entity('Note', type=u'z')
-        req = self.request()
-        p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien',
-                              connait=p1, evaluee=[p1, p2],
-                              reverse_ecrit_par=note)
-        self.assertEqual(p.nom, 'di mascio')
-        self.assertEqual([c.nom for c in p.connait], ['fayolle'])
-        self.assertEqual(sorted([c.nom for c in p.evaluee]), ['campeas', 'fayolle'])
-        self.assertEqual([c.type for c in p.reverse_ecrit_par], ['z'])
-
-        req = self.request()
-        auc = req.execute('Personne P WHERE P prenom "aurelien"').get_entity(0,0)
-        persons = set()
-        persons.add(p1)
-        persons.add(p2)
-        persons.add(auc)
-        self.assertEqual(2, len(persons))
-        self.assertNotEqual(p1, p2)
-        self.assertEqual(p2, auc)
+        access = self.admin_access
+        with access.web_request() as req:
+            p1 = req.create_entity('Personne', nom=u'fayolle', prenom=u'alexandre')
+            p2 = req.create_entity('Personne', nom=u'campeas', prenom=u'aurelien')
+            note = req.create_entity('Note', type=u'z')
+            p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien',
+                                  connait=p1, evaluee=[p1, p2],
+                                  reverse_ecrit_par=note)
+            self.assertEqual(p.nom, 'di mascio')
+            self.assertEqual([c.nom for c in p.connait], ['fayolle'])
+            self.assertEqual(sorted([c.nom for c in p.evaluee]), ['campeas', 'fayolle'])
+            self.assertEqual([c.type for c in p.reverse_ecrit_par], ['z'])
+            req.cnx.commit()
+        with access.web_request() as req:
+            auc = req.execute('Personne P WHERE P prenom "aurelien"').get_entity(0,0)
+            persons = set()
+            persons.add(p1)
+            persons.add(p2)
+            persons.add(auc)
+            self.assertEqual(2, len(persons))
+            self.assertNotEqual(p1, p2)
+            self.assertEqual(p2, auc)

 if __name__ == '__main__':
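Every test module touched by this changeset follows the same migration: the old implicit `self.request()` / `self.execute()` helpers are replaced by explicit access contexts. A minimal sketch of the new idiom (the test class, entity type and assertions below are illustrative, not part of the changeset):

    from cubicweb.devtools.testlib import CubicWebTC

    class ExampleTC(CubicWebTC):
        def test_example(self):
            # every request is now explicitly scoped to an access object
            with self.admin_access.web_request() as req:
                card = req.create_entity('Card', title=u'hello')
                self.assertEqual(len(req.execute('Any X WHERE X is Card')), 1)
                req.cnx.commit()  # commits go through the request's connection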
diff -r 84738d495ffd -r 793377697c81 test/unittest_migration.py
--- a/test/unittest_migration.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/test/unittest_migration.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -76,7 +76,7 @@
     def test_filter_scripts_for_mode(self):
         config = CubicWebConfiguration('data')
         config.verbosity = 0
-        self.assert_(not isinstance(config.migration_handler(), ServerMigrationHelper))
+        self.assertNotIsInstance(config.migration_handler(), ServerMigrationHelper)
         self.assertIsInstance(config.migration_handler(), MigrationHelper)
         config = self.config
         config.__class__.name = 'repository'
@@ -99,16 +99,15 @@
     def test_db_creation(self):
         """make sure database can be created"""
        config = ApptestConfiguration('data', apphome=self.datadir)
-        source = config.sources()['system']
+        source = config.system_source_config
         self.assertEqual(source['db-driver'], 'sqlite')
         handler = get_test_db_handler(config)
         handler.init_test_database()
         handler.build_db_cache()
         repo, cnx = handler.get_repo_and_cnx()
-        cu = cnx.cursor()
-        self.assertEqual(cu.execute('Any SN WHERE X is CWUser, X login "admin", X in_state S, S name SN').rows,
-                         [['activated']])
-        cnx.close()
+        with cnx:
+            self.assertEqual(cnx.execute('Any SN WHERE X is CWUser, X login "admin", X in_state S, S name SN').rows,
+                             [['activated']])
         repo.shutdown()

 if __name__ == '__main__':
diff -r 84738d495ffd -r 793377697c81 test/unittest_predicates.py
--- a/test/unittest_predicates.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/test/unittest_predicates.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -18,7 +18,10 @@
 """unit tests for selectors mechanism"""
 from operator import eq, lt, le, gt
+from contextlib import contextmanager
+
 from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.decorators import clear_cache

 from cubicweb import Binary
 from cubicweb.devtools.testlib import CubicWebTC
@@ -31,54 +34,39 @@


-class ImplementsSelectorTC(CubicWebTC):
+class ImplementsTC(CubicWebTC):
     def test_etype_priority(self):
-        req = self.request()
-        f = req.create_entity('File', data_name=u'hop.txt', data=Binary('hop'))
-        rset = f.as_rset()
-        anyscore = is_instance('Any')(f.__class__, req, rset=rset)
-        idownscore = adaptable('IDownloadable')(f.__class__, req, rset=rset)
-        self.assertTrue(idownscore > anyscore, (idownscore, anyscore))
-        filescore = is_instance('File')(f.__class__, req, rset=rset)
-        self.assertTrue(filescore > idownscore, (filescore, idownscore))
+        with self.admin_access.web_request() as req:
+            f = req.create_entity('File', data_name=u'hop.txt', data=Binary('hop'))
+            rset = f.as_rset()
+            anyscore = is_instance('Any')(f.__class__, req, rset=rset)
+            idownscore = adaptable('IDownloadable')(f.__class__, req, rset=rset)
+            self.assertTrue(idownscore > anyscore, (idownscore, anyscore))
+            filescore = is_instance('File')(f.__class__, req, rset=rset)
+            self.assertTrue(filescore > idownscore, (filescore, idownscore))

     def test_etype_inheritance_no_yams_inheritance(self):
         cls = self.vreg['etypes'].etype_class('Personne')
-        self.assertFalse(is_instance('Societe').score_class(cls, self.request()))
+        with self.admin_access.web_request() as req:
+            self.assertFalse(is_instance('Societe').score_class(cls, req))

     def test_yams_inheritance(self):
         cls = self.vreg['etypes'].etype_class('Transition')
-        self.assertEqual(is_instance('BaseTransition').score_class(cls, self.request()),
-                         3)
+        with self.admin_access.web_request() as req:
+            self.assertEqual(is_instance('BaseTransition').score_class(cls, req),
+                             3)

     def test_outer_join(self):
-        req = self.request()
-        rset = req.execute('Any U,B WHERE B? bookmarked_by U, U login "anon"')
-        self.assertEqual(is_instance('Bookmark')(None, req, rset=rset, row=0, col=1),
-                         0)
+        with self.admin_access.web_request() as req:
+            rset = req.execute('Any U,B WHERE B? bookmarked_by U, U login "anon"')
+            self.assertEqual(is_instance('Bookmark')(None, req, rset=rset, row=0, col=1),
+                             0)


 class WorkflowSelectorTC(CubicWebTC):
-    def _commit(self):
-        self.commit()
-        self.wf_entity.cw_clear_all_caches()
-
-    def setup_database(self):
-        wf = self.shell().add_workflow("wf_test", 'StateFull', default=True)
-        created = wf.add_state('created', initial=True)
-        validated = wf.add_state('validated')
-        abandoned = wf.add_state('abandoned')
-        wf.add_transition('validate', created, validated, ('managers',))
-        wf.add_transition('forsake', (created, validated,), abandoned, ('managers',))

     def setUp(self):
         super(WorkflowSelectorTC, self).setUp()
-        self.req = self.request()
-        self.wf_entity = self.req.create_entity('StateFull', name=u'')
-        self.rset = self.wf_entity.as_rset()
-        self.adapter = self.wf_entity.cw_adapt_to('IWorkflowable')
-        self._commit()
-        self.assertEqual(self.adapter.state, 'created')
         # enable debug mode to get state/transition validation on the fly
         self.vreg.config.debugmode = True
@@ -86,122 +74,154 @@
         self.vreg.config.debugmode = False
         super(WorkflowSelectorTC, self).tearDown()

+    def setup_database(self):
+        with self.admin_access.shell() as shell:
+            wf = shell.add_workflow("wf_test", 'StateFull', default=True)
+            created = wf.add_state('created', initial=True)
+            validated = wf.add_state('validated')
+            abandoned = wf.add_state('abandoned')
+            wf.add_transition('validate', created, validated, ('managers',))
+            wf.add_transition('forsake', (created, validated,), abandoned, ('managers',))
+
+    @contextmanager
+    def statefull_stuff(self):
+        with self.admin_access.web_request() as req:
+            wf_entity = req.create_entity('StateFull', name=u'')
+            rset = wf_entity.as_rset()
+            adapter = wf_entity.cw_adapt_to('IWorkflowable')
+            req.cnx.commit()
+            self.assertEqual(adapter.state, 'created')
+            yield req, wf_entity, rset, adapter
+
     def test_is_in_state(self):
-        for state in ('created', 'validated', 'abandoned'):
-            selector = is_in_state(state)
-            self.assertEqual(selector(None, self.req, rset=self.rset),
-                             state=="created")
-
-        self.adapter.fire_transition('validate')
-        self._commit()
-        self.assertEqual(self.adapter.state, 'validated')
+        with self.statefull_stuff() as (req, wf_entity, rset, adapter):
+            for state in ('created', 'validated', 'abandoned'):
+                selector = is_in_state(state)
+                self.assertEqual(selector(None, req, rset=rset),
+                                 state=="created")

-        selector = is_in_state('created')
-        self.assertEqual(selector(None, self.req, rset=self.rset), 0)
-        selector = is_in_state('validated')
-        self.assertEqual(selector(None, self.req, rset=self.rset), 1)
-        selector = is_in_state('validated', 'abandoned')
-        self.assertEqual(selector(None, self.req, rset=self.rset), 1)
-        selector = is_in_state('abandoned')
-        self.assertEqual(selector(None, self.req, rset=self.rset), 0)
+            adapter.fire_transition('validate')
+            req.cnx.commit(); wf_entity.cw_clear_all_caches()
+            self.assertEqual(adapter.state, 'validated')
+
+            clear_cache(rset, 'get_entity')
+
+            selector = is_in_state('created')
+            self.assertEqual(selector(None, req, rset=rset), 0)
+            selector = is_in_state('validated')
+            self.assertEqual(selector(None, req, rset=rset), 1)
+            selector = is_in_state('validated', 'abandoned')
+            self.assertEqual(selector(None, req, rset=rset), 1)
+            selector = is_in_state('abandoned')
+            self.assertEqual(selector(None, req, rset=rset), 0)

-        self.adapter.fire_transition('forsake')
-        self._commit()
-        self.assertEqual(self.adapter.state, 'abandoned')
+            adapter.fire_transition('forsake')
+            req.cnx.commit(); wf_entity.cw_clear_all_caches()
+            self.assertEqual(adapter.state, 'abandoned')
+
+            clear_cache(rset, 'get_entity')

-        selector = is_in_state('created')
-        self.assertEqual(selector(None, self.req, rset=self.rset), 0)
-        selector = is_in_state('validated')
-        self.assertEqual(selector(None, self.req, rset=self.rset), 0)
-        selector = is_in_state('validated', 'abandoned')
-        self.assertEqual(selector(None, self.req, rset=self.rset), 1)
-        self.assertEqual(self.adapter.state, 'abandoned')
-        self.assertEqual(selector(None, self.req, rset=self.rset), 1)
+            selector = is_in_state('created')
+            self.assertEqual(selector(None, req, rset=rset), 0)
+            selector = is_in_state('validated')
+            self.assertEqual(selector(None, req, rset=rset), 0)
+            selector = is_in_state('validated', 'abandoned')
+            self.assertEqual(selector(None, req, rset=rset), 1)
+            self.assertEqual(adapter.state, 'abandoned')
+            self.assertEqual(selector(None, req, rset=rset), 1)

     def test_is_in_state_unvalid_names(self):
-        selector = is_in_state("unknown")
-        with self.assertRaises(ValueError) as cm:
-            selector(None, self.req, rset=self.rset)
-        self.assertEqual(str(cm.exception),
-                         "wf_test: unknown state(s): unknown")
-        selector = is_in_state("weird", "unknown", "created", "weird")
-        with self.assertRaises(ValueError) as cm:
-            selector(None, self.req, rset=self.rset)
-        self.assertEqual(str(cm.exception),
-                         "wf_test: unknown state(s): unknown,weird")
+        with self.statefull_stuff() as (req, wf_entity, rset, adapter):
+            selector = is_in_state("unknown")
+            with self.assertRaises(ValueError) as cm:
+                selector(None, req, rset=rset)
+            self.assertEqual(str(cm.exception),
+                             "wf_test: unknown state(s): unknown")
+            selector = is_in_state("weird", "unknown", "created", "weird")
+            with self.assertRaises(ValueError) as cm:
+                selector(None, req, rset=rset)
+            self.assertEqual(str(cm.exception),
+                             "wf_test: unknown state(s): unknown,weird")

     def test_on_transition(self):
-        for transition in ('validate', 'forsake'):
-            selector = on_transition(transition)
-            self.assertEqual(selector(None, self.req, rset=self.rset), 0)
+        with self.statefull_stuff() as (req, wf_entity, rset, adapter):
+            for transition in ('validate', 'forsake'):
+                selector = on_transition(transition)
+                self.assertEqual(selector(None, req, rset=rset), 0)

-        self.adapter.fire_transition('validate')
-        self._commit()
-        self.assertEqual(self.adapter.state, 'validated')
+            adapter.fire_transition('validate')
+            req.cnx.commit(); wf_entity.cw_clear_all_caches()
+            self.assertEqual(adapter.state, 'validated')
+
+            clear_cache(rset, 'get_entity')

-        selector = on_transition("validate")
-        self.assertEqual(selector(None, self.req, rset=self.rset), 1)
-        selector = on_transition("validate", "forsake")
-        self.assertEqual(selector(None, self.req, rset=self.rset), 1)
-        selector = on_transition("forsake")
-        self.assertEqual(selector(None, self.req, rset=self.rset), 0)
+            selector = on_transition("validate")
+            self.assertEqual(selector(None, req, rset=rset), 1)
+            selector = on_transition("validate", "forsake")
+            self.assertEqual(selector(None, req, rset=rset), 1)
+            selector = on_transition("forsake")
+            self.assertEqual(selector(None, req, rset=rset), 0)

-        self.adapter.fire_transition('forsake')
-        self._commit()
-        self.assertEqual(self.adapter.state, 'abandoned')
+            adapter.fire_transition('forsake')
+            req.cnx.commit(); wf_entity.cw_clear_all_caches()
+            self.assertEqual(adapter.state, 'abandoned')
+
+            clear_cache(rset, 'get_entity')

-        selector = on_transition("validate")
-        self.assertEqual(selector(None, self.req, rset=self.rset), 0)
-        selector = on_transition("validate", "forsake")
-        self.assertEqual(selector(None, self.req, rset=self.rset), 1)
-        selector = on_transition("forsake")
-        self.assertEqual(selector(None, self.req, rset=self.rset), 1)
+            selector = on_transition("validate")
+            self.assertEqual(selector(None, req, rset=rset), 0)
+            selector = on_transition("validate", "forsake")
+            self.assertEqual(selector(None, req, rset=rset), 1)
+            selector = on_transition("forsake")
+            self.assertEqual(selector(None, req, rset=rset), 1)

     def test_on_transition_unvalid_names(self):
-        selector = on_transition("unknown")
-        with self.assertRaises(ValueError) as cm:
-            selector(None, self.req, rset=self.rset)
-        self.assertEqual(str(cm.exception),
-                         "wf_test: unknown transition(s): unknown")
-        selector = on_transition("weird", "unknown", "validate", "weird")
-        with self.assertRaises(ValueError) as cm:
-            selector(None, self.req, rset=self.rset)
-        self.assertEqual(str(cm.exception),
-                         "wf_test: unknown transition(s): unknown,weird")
+        with self.statefull_stuff() as (req, wf_entity, rset, adapter):
+            selector = on_transition("unknown")
+            with self.assertRaises(ValueError) as cm:
+                selector(None, req, rset=rset)
+            self.assertEqual(str(cm.exception),
+                             "wf_test: unknown transition(s): unknown")
+            selector = on_transition("weird", "unknown", "validate", "weird")
+            with self.assertRaises(ValueError) as cm:
+                selector(None, req, rset=rset)
+            self.assertEqual(str(cm.exception),
+                             "wf_test: unknown transition(s): unknown,weird")

     def test_on_transition_with_no_effect(self):
         """selector will not be triggered with `change_state()`"""
-        self.adapter.change_state('validated')
-        self._commit()
-        self.assertEqual(self.adapter.state, 'validated')
+        with self.statefull_stuff() as (req, wf_entity, rset, adapter):
+            adapter.change_state('validated')
+            req.cnx.commit(); wf_entity.cw_clear_all_caches()
+            self.assertEqual(adapter.state, 'validated')

-        selector = on_transition("validate")
-        self.assertEqual(selector(None, self.req, rset=self.rset), 0)
-        selector = on_transition("validate", "forsake")
-        self.assertEqual(selector(None, self.req, rset=self.rset), 0)
-        selector = on_transition("forsake")
-        self.assertEqual(selector(None, self.req, rset=self.rset), 0)
+            selector = on_transition("validate")
+            self.assertEqual(selector(None, req, rset=rset), 0)
+            selector = on_transition("validate", "forsake")
+            self.assertEqual(selector(None, req, rset=rset), 0)
+            selector = on_transition("forsake")
+            self.assertEqual(selector(None, req, rset=rset), 0)


 class RelationPossibleTC(CubicWebTC):

     def test_rqlst_1(self):
-        req = self.request()
-        selector = relation_possible('in_group')
-        select = self.vreg.parse(req, 'Any X WHERE X is CWUser').children[0]
-        score = selector(None, req, rset=1,
-                         select=select, filtered_variable=select.defined_vars['X'])
-        self.assertEqual(score, 1)
+        with self.admin_access.web_request() as req:
+            selector = relation_possible('in_group')
+            select = self.vreg.parse(req, 'Any X WHERE X is CWUser').children[0]
+            score = selector(None, req, rset=1,
+                             select=select, filtered_variable=select.defined_vars['X'])
+            self.assertEqual(score, 1)

     def test_rqlst_2(self):
-        req = self.request()
-        selector = relation_possible('in_group')
-        select = self.vreg.parse(req, 'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, '
-                                 'Y creation_date YD, Y is CWGroup '
-                                 'HAVING DAY(XD)=DAY(YD)').children[0]
-        score = selector(None, req, rset=1,
-                         select=select, filtered_variable=select.defined_vars['X'])
-        self.assertEqual(score, 1)
+        with self.admin_access.web_request() as req:
+            selector = relation_possible('in_group')
+            select = self.vreg.parse(req, 'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, '
+                                     'Y creation_date YD, Y is CWGroup '
+                                     'HAVING DAY(XD)=DAY(YD)').children[0]
+            score = selector(None, req, rset=1,
+                             select=select, filtered_variable=select.defined_vars['X'])
+            self.assertEqual(score, 1)

     def test_ambiguous(self):
         # Ambiguous relations are :
@@ -210,10 +230,11 @@
         # checking case.
         selector = relation_possible('fabrique_par', role='object',
                                      target_etype='Personne', strict=True)
-        req = self.request()
-        usine = req.create_entity('Usine', lieu=u'here')
-        score = selector(None, req, rset=usine.as_rset())
-        self.assertEqual(0, score)
+        with self.admin_access.web_request() as req:
+            usine = req.create_entity('Usine', lieu=u'here')
+            score = selector(None, req, rset=usine.as_rset())
+            self.assertEqual(0, score)
+

 class MatchUserGroupsTC(CubicWebTC):
     def test_owners_group(self):
@@ -227,79 +248,85 @@
         SomeAction.__registered__(self.vreg['actions'])
         self.assertTrue(SomeAction in self.vreg['actions']['yo'], self.vreg['actions'])
         try:
+            with self.admin_access.web_request() as req:
+                self.create_user(req, 'john')
             # login as a simple user
-            req = self.request()
-            self.create_user(req, 'john')
-            self.login('john')
-            # it should not be possible to use SomeAction not owned objects
-            req = self.request()
-            rset = req.execute('Any G WHERE G is CWGroup, G name "managers"')
-            self.assertFalse('yo' in dict(self.pactions(req, rset)))
-            # insert a new card, and check that we can use SomeAction on our object
-            self.execute('INSERT Card C: C title "zoubidou"')
-            self.commit()
-            req = self.request()
-            rset = req.execute('Card C WHERE C title "zoubidou"')
-            self.assertTrue('yo' in dict(self.pactions(req, rset)), self.pactions(req, rset))
+            john_access = self.new_access('john')
+            with john_access.web_request() as req:
+                # it should not be possible to use SomeAction not owned objects
+                rset = req.execute('Any G WHERE G is CWGroup, G name "managers"')
+                self.assertFalse('yo' in dict(self.pactions(req, rset)))
+                # insert a new card, and check that we can use SomeAction on our object
+                req.execute('INSERT Card C: C title "zoubidou"')
+                req.cnx.commit()
+            with john_access.web_request() as req:
+                rset = req.execute('Card C WHERE C title "zoubidou"')
+                self.assertTrue('yo' in dict(self.pactions(req, rset)), self.pactions(req, rset))
             # make sure even managers can't use the action
-            self.restore_connection()
-            req = self.request()
-            rset = req.execute('Card C WHERE C title "zoubidou"')
-            self.assertFalse('yo' in dict(self.pactions(req, rset)))
+            with self.admin_access.web_request() as req:
+                rset = req.execute('Card C WHERE C title "zoubidou"')
+                self.assertFalse('yo' in dict(self.pactions(req, rset)))
         finally:
             del self.vreg[SomeAction.__registry__][SomeAction.__regid__]


-class MultiLinesRsetSelectorTC(CubicWebTC):
-    def setUp(self):
-        super(MultiLinesRsetSelectorTC, self).setUp()
-        self.req = self.request()
-        self.req.execute('INSERT CWGroup G: G name "group1"')
-        self.req.execute('INSERT CWGroup G: G name "group2"')
-        self.commit()
-        self.rset = self.req.execute('Any G WHERE G is CWGroup')
+class MultiLinesRsetTC(CubicWebTC):
+    def setup_database(self):
+        with self.admin_access.web_request() as req:
+            req.execute('INSERT CWGroup G: G name "group1"')
+            req.execute('INSERT CWGroup G: G name "group2"')
+            req.cnx.commit()

     def test_default_op_in_selector(self):
-        expected = len(self.rset)
-        selector = multi_lines_rset(expected)
-        self.assertEqual(selector(None, self.req, rset=self.rset), 1)
-        self.assertEqual(selector(None, self.req, None), 0)
-        selector = multi_lines_rset(expected + 1)
-        self.assertEqual(selector(None, self.req, rset=self.rset), 0)
-        self.assertEqual(selector(None, self.req, None), 0)
-        selector = multi_lines_rset(expected - 1)
-        self.assertEqual(selector(None, self.req, rset=self.rset), 0)
-        self.assertEqual(selector(None, self.req, None), 0)
+        with self.admin_access.web_request() as req:
+            rset = req.execute('Any G WHERE G is CWGroup')
+            expected = len(rset)
+            selector = multi_lines_rset(expected)
+            self.assertEqual(selector(None, req, rset=rset), 1)
+            self.assertEqual(selector(None, req, None), 0)
+            selector = multi_lines_rset(expected + 1)
+            self.assertEqual(selector(None, req, rset=rset), 0)
+            self.assertEqual(selector(None, req, None), 0)
+            selector = multi_lines_rset(expected - 1)
+            self.assertEqual(selector(None, req, rset=rset), 0)
+            self.assertEqual(selector(None, req, None), 0)

     def test_without_rset(self):
-        expected = len(self.rset)
-        selector = multi_lines_rset(expected)
-        self.assertEqual(selector(None, self.req, None), 0)
-        selector = multi_lines_rset(expected + 1)
-        self.assertEqual(selector(None, self.req, None), 0)
-        selector = multi_lines_rset(expected - 1)
-        self.assertEqual(selector(None, self.req, None), 0)
+        with self.admin_access.web_request() as req:
+            rset = req.execute('Any G WHERE G is CWGroup')
+            expected = len(rset)
+            selector = multi_lines_rset(expected)
+            self.assertEqual(selector(None, req, None), 0)
+            selector = multi_lines_rset(expected + 1)
+            self.assertEqual(selector(None, req, None), 0)
+            selector = multi_lines_rset(expected - 1)
+            self.assertEqual(selector(None, req, None), 0)

     def test_with_operators(self):
-        expected = len(self.rset)
+        with self.admin_access.web_request() as req:
+            rset = req.execute('Any G WHERE G is CWGroup')
+            expected = len(rset)

-        # Format 'expected', 'operator', 'assert'
-        testdata = (( expected,   eq, 1),
-                    ( expected+1, eq, 0),
-                    ( expected-1, eq, 0),
-                    ( expected,   le, 1),
-                    ( expected+1, le, 1),
-                    ( expected-1, le, 0),
-                    ( expected-1, gt, 1),
-                    ( expected,   gt, 0),
-                    ( expected+1, gt, 0),
-                    ( expected+1, lt, 1),
-                    ( expected,   lt, 0),
-                    ( expected-1, lt, 0))
+            # Format 'expected', 'operator', 'assert'
+            testdata = (( expected,   eq, 1),
+                        ( expected+1, eq, 0),
+                        ( expected-1, eq, 0),
+                        ( expected,   le, 1),
+                        ( expected+1, le, 1),
+                        ( expected-1, le, 0),
+                        ( expected-1, gt, 1),
+                        ( expected,   gt, 0),
+                        ( expected+1, gt, 0),
+                        ( expected+1, lt, 1),
+                        ( expected,   lt, 0),
+                        ( expected-1, lt, 0))

-        for (expected, operator, assertion) in testdata:
-            selector = multi_lines_rset(expected, operator)
-            yield self.assertEqual, selector(None, self.req, rset=self.rset), assertion
+            for (expected, operator, assertion) in testdata:
+                selector = multi_lines_rset(expected, operator)
+                yield self.assertEqual, selector(None, req, rset=rset), assertion
+
+
+class MatchKwargsTC(TestCase):

     def test_match_kwargs_default(self):
         selector = match_kwargs( set( ('a', 'b') ) )
@@ -316,37 +343,37 @@
         self.assertEqual(selector(None, None, a=1, c=1), 1)


-class ScoreEntitySelectorTC(CubicWebTC):
+class ScoreEntityTC(CubicWebTC):

     def test_intscore_entity_selector(self):
-        req = self.request()
-        rset = req.execute('Any E WHERE E eid 1')
-        selector = score_entity(lambda x: None)
-        self.assertEqual(selector(None, req, rset=rset), 0)
-        selector = score_entity(lambda x: "something")
-        self.assertEqual(selector(None, req, rset=rset), 1)
-        selector = score_entity(lambda x: object)
-        self.assertEqual(selector(None, req, rset=rset), 1)
-        rset = req.execute('Any G LIMIT 2 WHERE G is CWGroup')
-        selector = score_entity(lambda x: 10)
-        self.assertEqual(selector(None, req, rset=rset), 20)
-        selector = score_entity(lambda x: 10, mode='any')
-        self.assertEqual(selector(None, req, rset=rset), 10)
+        with self.admin_access.web_request() as req:
+            rset = req.execute('Any E WHERE E eid 1')
+            selector = score_entity(lambda x: None)
+            self.assertEqual(selector(None, req, rset=rset), 0)
+            selector = score_entity(lambda x: "something")
+            self.assertEqual(selector(None, req, rset=rset), 1)
+            selector = score_entity(lambda x: object)
+            self.assertEqual(selector(None, req, rset=rset), 1)
+            rset = req.execute('Any G LIMIT 2 WHERE G is CWGroup')
+            selector = score_entity(lambda x: 10)
+            self.assertEqual(selector(None, req, rset=rset), 20)
+            selector = score_entity(lambda x: 10, mode='any')
+            self.assertEqual(selector(None, req, rset=rset), 10)

     def test_rql_condition_entity(self):
-        req = self.request()
-        selector = rql_condition('X identity U')
-        rset = req.user.as_rset()
-        self.assertEqual(selector(None, req, rset=rset), 1)
-        self.assertEqual(selector(None, req, entity=req.user), 1)
-        self.assertEqual(selector(None, req), 0)
+        with self.admin_access.web_request() as req:
+            selector = rql_condition('X identity U')
+            rset = req.user.as_rset()
+            self.assertEqual(selector(None, req, rset=rset), 1)
+            self.assertEqual(selector(None, req, entity=req.user), 1)
+            self.assertEqual(selector(None, req), 0)

     def test_rql_condition_user(self):
-        req = self.request()
-        selector = rql_condition('U login "admin"', user_condition=True)
-        self.assertEqual(selector(None, req), 1)
-        selector = rql_condition('U login "toto"', user_condition=True)
-        self.assertEqual(selector(None, req), 0)
+        with self.admin_access.web_request() as req:
+            selector = rql_condition('U login "admin"', user_condition=True)
+            self.assertEqual(selector(None, req), 1)
+            selector = rql_condition('U login "toto"', user_condition=True)
+            self.assertEqual(selector(None, req), 0)


 class AdaptablePredicateTC(CubicWebTC):
@@ -359,10 +386,10 @@
                 __regid__ = 'IWhatever'
                 __select__ = is_instance('CWGroup')
         with self.temporary_appobjects(CWUserIWhatever, CWGroupIWhatever):
-            req = self.request()
-            selector = adaptable('IWhatever')
-            rset = req.execute('Any X WHERE X is IN(CWGroup, CWUser)')
-            self.assertTrue(selector(None, req, rset=rset))
+            with self.admin_access.web_request() as req:
+                selector = adaptable('IWhatever')
+                rset = req.execute('Any X WHERE X is IN(CWGroup, CWUser)')
+                self.assertTrue(selector(None, req, rset=rset))

 if __name__ == '__main__':
     unittest_main()
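For readers unfamiliar with the selector API exercised above: a predicate is called as `selector(cls, req, rset=...)` and returns an integer score, 0 meaning "does not apply". A small sketch of the pattern, assuming `multi_lines_rset` is importable from `cubicweb.predicates` as this test module suggests, with `req` and `rset` obtained as in the tests above:

    from operator import gt
    from cubicweb.predicates import multi_lines_rset

    # scores 1 when the result set has strictly more than one row
    selector = multi_lines_rset(1, gt)
    score = selector(None, req, rset=rset)  # 0 or 1 depending on len(rset)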
diff -r 84738d495ffd -r 793377697c81 test/unittest_repoapi.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/unittest_repoapi.py	Wed Sep 24 18:04:30 2014 +0200
@@ -0,0 +1,89 @@
+# copyright 2013-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""unittest for cubicweb.repoapi"""
+
+
+from cubicweb.devtools.testlib import CubicWebTC
+
+from cubicweb import ProgrammingError
+from cubicweb.repoapi import ClientConnection, connect, anonymous_cnx
+
+
+class REPOAPITC(CubicWebTC):
+
+    def test_clt_cnx_basic_usage(self):
+        """Test that a client connection can be used to access the database"""
+        with self.admin_access.client_cnx() as cltcnx:
+            # (1) some RQL request
+            rset = cltcnx.execute('Any X WHERE X is CWUser')
+            self.assertTrue(rset)
+            # (2) ORM usage
+            random_user = rset.get_entity(0, 0)
+            # (3) Write operation
+            random_user.cw_set(surname=u'babar')
+            # (4) commit
+            cltcnx.commit()
+            rset = cltcnx.execute('''Any X WHERE X is CWUser,
+                                     X surname "babar"
+                                  ''')
+            self.assertTrue(rset)
+            # prepare test for implicit rollback
+            random_user = rset.get_entity(0, 0)
+            random_user.cw_set(surname=u'celestine')
+        # implicit rollback on exit
+        with self.admin_access.client_cnx() as cltcnx:
+            rset = cltcnx.execute('''Any X WHERE X is CWUser,
+                                     X surname "babar"
+                                  ''')
+            self.assertTrue(rset)
+
+    def test_clt_cnx_life_cycle(self):
+        """Check that ClientConnection requires explicit open and close"""
+        access = self.admin_access
+        cltcnx = ClientConnection(access._session)
+        # connection not open yet
+        with self.assertRaises(ProgrammingError):
+            cltcnx.execute('Any X WHERE X is CWUser')
+        # connection open and working
+        with cltcnx:
+            cltcnx.execute('Any X WHERE X is CWUser')
+        # connection closed
+        with self.assertRaises(ProgrammingError):
+            cltcnx.execute('Any X WHERE X is CWUser')
+
+    def test_connect(self):
+        """check that repoapi.connect works and returns a usable connection"""
+        clt_cnx = connect(self.repo, login='admin', password='gingkow')
+        self.assertEqual('admin', clt_cnx.user.login)
+        with clt_cnx:
+            rset = clt_cnx.execute('Any X WHERE X is CWUser')
+            self.assertTrue(rset)
+
+    def test_anonymous_connect(self):
+        """check that you can get an anonymous connection when the data exist"""
+        clt_cnx = anonymous_cnx(self.repo)
+        self.assertEqual('anon', clt_cnx.user.login)
+        with clt_cnx:
+            rset = clt_cnx.execute('Any X WHERE X is CWUser')
+            self.assertTrue(rset)
+
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
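The new test module above is currently the best documentation of the 3.19 `repoapi` entry points. Reduced to its core (the `repo` object and the `gingkow` password come from the test fixture and are assumptions outside of it):

    from cubicweb import repoapi

    cnx = repoapi.connect(repo, login='admin', password='gingkow')
    with cnx:                    # a ClientConnection must be explicitly opened
        rset = cnx.execute('Any X WHERE X is CWUser')
        cnx.commit()             # write operations need an explicit commit
    # once the with block exits, further execute() calls raise ProgrammingError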
diff -r 84738d495ffd -r 793377697c81 test/unittest_req.py
--- a/test/unittest_req.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/test/unittest_req.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -56,94 +56,98 @@

     def test_base_url(self):
         base_url = self.config['base-url']
-        self.assertEqual(self.session.base_url(), base_url)
-        assert 'https-url' not in self.config
-        self.assertEqual(self.session.base_url(secure=True), base_url)
-        secure_base_url = base_url.replace('http', 'https')
-        self.config.global_set_option('https-url', secure_base_url)
-        self.assertEqual(self.session.base_url(secure=True), secure_base_url)
+        with self.admin_access.repo_cnx() as session:
+            self.assertEqual(session.base_url(), base_url)
+            assert 'https-url' not in self.config
+            self.assertEqual(session.base_url(secure=True), base_url)
+            secure_base_url = base_url.replace('http', 'https')
+            self.config.global_set_option('https-url', secure_base_url)
+            self.assertEqual(session.base_url(secure=True), secure_base_url)

     def test_view_catch_ex(self):
-        req = self.request()
-        rset = self.execute('CWUser X WHERE X login "hop"')
-        self.assertEqual(req.view('oneline', rset, 'null'), '')
-        self.assertRaises(ObjectNotFound, req.view, 'onelinee', rset, 'null')
+        with self.admin_access.web_request() as req:
+            rset = req.execute('CWUser X WHERE X login "hop"')
+            self.assertEqual(req.view('oneline', rset, 'null'), '')
+            self.assertRaises(ObjectNotFound, req.view, 'onelinee', rset, 'null')

     def test_find_one_entity(self):
-        self.request().create_entity(
-            'CWUser', login=u'cdevienne', upassword=u'cdevienne',
-            surname=u'de Vienne', firstname=u'Christophe',
-            in_group=self.request().find('CWGroup', name=u'users').one())
+        with self.admin_access.web_request() as req:
+            req.create_entity(
+                'CWUser', login=u'cdevienne', upassword=u'cdevienne',
+                surname=u'de Vienne', firstname=u'Christophe',
+                in_group=req.find('CWGroup', name=u'users').one())

-        self.request().create_entity(
-            'CWUser', login=u'adim', upassword='adim', surname=u'di mascio',
-            firstname=u'adrien',
-            in_group=self.request().find('CWGroup', name=u'users').one())
+            req.create_entity(
+                'CWUser', login=u'adim', upassword='adim', surname=u'di mascio',
+                firstname=u'adrien',
+                in_group=req.find('CWGroup', name=u'users').one())

-        u = self.request().find_one_entity('CWUser', login=u'cdevienne')
-        self.assertEqual(u.firstname, u"Christophe")
+            u = req.find_one_entity('CWUser', login=u'cdevienne')
+            self.assertEqual(u.firstname, u"Christophe")

-        with self.assertRaises(FindEntityError):
-            self.request().find_one_entity('CWUser', login=u'patanok')
+            with self.assertRaises(FindEntityError):
+                req.find_one_entity('CWUser', login=u'patanok')

-        with self.assertRaises(FindEntityError):
-            self.request().find_one_entity('CWUser')
+            with self.assertRaises(FindEntityError):
+                req.find_one_entity('CWUser')

     def test_find_entities(self):
-        self.request().create_entity(
-            'CWUser', login=u'cdevienne', upassword=u'cdevienne',
-            surname=u'de Vienne', firstname=u'Christophe',
-            in_group=self.request().find('CWGroup', name=u'users').one())
-
-        self.request().create_entity(
-            'CWUser', login=u'adim', upassword='adim', surname=u'di mascio',
-            firstname=u'adrien',
-            in_group=self.request().find('CWGroup', name=u'users').one())
+        with self.admin_access.web_request() as req:
+            req.create_entity(
+                'CWUser', login=u'cdevienne', upassword=u'cdevienne',
+                surname=u'de Vienne', firstname=u'Christophe',
+                in_group=req.find('CWGroup', name=u'users').one())

-        l = list(self.request().find_entities('CWUser', login=u'cdevienne'))
-        self.assertEqual(1, len(l))
-        self.assertEqual(l[0].firstname, u"Christophe")
+            req.create_entity(
+                'CWUser', login=u'adim', upassword='adim', surname=u'di mascio',
+                firstname=u'adrien',
+                in_group=req.find('CWGroup', name=u'users').one())

-        l = list(self.request().find_entities('CWUser', login=u'patanok'))
-        self.assertEqual(0, len(l))
+            l = list(req.find_entities('CWUser', login=u'cdevienne'))
+            self.assertEqual(1, len(l))
+            self.assertEqual(l[0].firstname, u"Christophe")

-        l = list(self.request().find_entities('CWUser'))
-        self.assertEqual(4, len(l))
+            l = list(req.find_entities('CWUser', login=u'patanok'))
+            self.assertEqual(0, len(l))
+
+            l = list(req.find_entities('CWUser'))
+            self.assertEqual(4, len(l))

     def test_find(self):
-        self.request().create_entity(
-            'CWUser', login=u'cdevienne', upassword=u'cdevienne',
-            surname=u'de Vienne', firstname=u'Christophe',
-            in_group=self.request().find('CWGroup', name=u'users').one())
+        with self.admin_access.web_request() as req:
+            req.create_entity(
+                'CWUser', login=u'cdevienne', upassword=u'cdevienne',
+                surname=u'de Vienne', firstname=u'Christophe',
+                in_group=req.find('CWGroup', name=u'users').one())

-        self.request().create_entity(
-            'CWUser', login=u'adim', upassword='adim', surname=u'di mascio',
-            firstname=u'adrien',
-            in_group=self.request().find('CWGroup', name=u'users').one())
+            req.create_entity(
+                'CWUser', login=u'adim', upassword='adim', surname=u'di mascio',
+                firstname=u'adrien',
+                in_group=req.find('CWGroup', name=u'users').one())

-        u = self.request().find('CWUser', login=u'cdevienne').one()
-        self.assertEqual(u.firstname, u"Christophe")
+            u = req.find('CWUser', login=u'cdevienne').one()
+            self.assertEqual(u.firstname, u"Christophe")

-        users = list(self.request().find('CWUser').entities())
-        self.assertEqual(len(users), 4)
+            users = list(req.find('CWUser').entities())
+            self.assertEqual(len(users), 4)

-        groups = list(
-            self.request().find('CWGroup', reverse_in_group=u).entities())
-        self.assertEqual(len(groups), 1)
-        self.assertEqual(groups[0].name, u'users')
+            groups = list(
+                req.find('CWGroup', reverse_in_group=u).entities())
+            self.assertEqual(len(groups), 1)
+            self.assertEqual(groups[0].name, u'users')

-        users = self.request().find('CWUser', in_group=groups[0]).entities()
-        users = list(users)
-        self.assertEqual(len(users), 2)
+            users = req.find('CWUser', in_group=groups[0]).entities()
+            users = list(users)
+            self.assertEqual(len(users), 2)

-        with self.assertRaises(AssertionError):
-            self.request().find('CWUser', chapeau=u"melon")
+            with self.assertRaises(AssertionError):
+                req.find('CWUser', chapeau=u"melon")

-        with self.assertRaises(AssertionError):
-            self.request().find('CWUser', reverse_buddy=users[0])
+            with self.assertRaises(AssertionError):
+                req.find('CWUser', reverse_buddy=users[0])

-        with self.assertRaises(NotImplementedError):
-            self.request().find('CWUser', in_group=[1, 2])
+            with self.assertRaises(NotImplementedError):
+                req.find('CWUser', in_group=[1, 2])

 if __name__ == '__main__':
     unittest_main()
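As the `unittest_req.py` hunks above show, `find` / `find_entities` / `find_one_entity` build the RQL for you from keyword arguments. A compact sketch of the API, assuming a `req` obtained from `admin_access.web_request()` exactly as in those tests (the `nosuchlogin` value is illustrative):

    users_group = req.find('CWGroup', name=u'users').one()   # exactly one match expected
    members = list(req.find('CWUser', in_group=users_group).entities())
    nobody = list(req.find_entities('CWUser', login=u'nosuchlogin'))  # empty list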
diff -r 84738d495ffd -r 793377697c81 test/unittest_rqlrewrite.py
--- a/test/unittest_rqlrewrite.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/test/unittest_rqlrewrite.py	Wed Sep 24 18:04:30 2014 +0200
@@ -129,7 +129,7 @@
              "F name 'read', F require_group E, A is State, E is CWGroup, F is CWPermission), "
              "(EXISTS(S ref LIKE 'PUBLIC%')) OR (EXISTS(B in_group G, G name 'public', G is CWGroup)), "
              "S is Affaire")
-        self.assertTrue('D' in kwargs)
+        self.assertIn('D', kwargs)

     def test_or(self):
         constraint = '(X identity U) OR (X in_state ST, CL identity U, CL in_state ST, ST name "subscribed")'
@@ -507,11 +507,12 @@
         args = {}
         querier = self.repo.querier
         union = querier.parse(rql)
-        querier.solutions(self.session, union, args)
-        querier._annotate(union)
-        plan = querier.plan_factory(union, args, self.session)
-        plan.preprocess(union)
-        return union
+        with self.admin_access.repo_cnx() as cnx:
+            querier.solutions(cnx, union, args)
+            querier._annotate(union)
+            plan = querier.plan_factory(union, args, cnx)
+            plan.preprocess(union)
+            return union

     def test_ambiguous_optional_same_exprs(self):
         """See #3013535"""
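The `unittest_rset.py` hunks below repeatedly build a `ResultSet` by hand and attach a request to it before any entity access works. The pattern, reduced to its core (rows, RQL and description values are illustrative, and the snippet is assumed to run inside a `CubicWebTC` test method):

    from cubicweb.rset import ResultSet

    rs = ResultSet([[12000, 'adim'], [13000, 'syt']],
                   'Any U,L WHERE U is CWUser, U login L',
                   description=[['CWUser', 'String']] * 2)
    with self.admin_access.web_request() as req:
        rs.req = req          # required before get_entity() / entities()
        rs.vreg = self.vreg   # the instance's vregistry
        assert rs.limit(1).rows == [[12000, 'adim']]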
diff -r 84738d495ffd -r 793377697c81 test/unittest_rset.py
--- a/test/unittest_rset.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/test/unittest_rset.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,5 +1,5 @@
 # coding: utf-8
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -27,7 +27,6 @@
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.rset import NotAnEntity, ResultSet, attr_desc_iterator
-
 from cubicweb import NoResultError, MultipleResultsError


@@ -104,27 +103,27 @@
         self.assertEqual(len(pickle.dumps(self.rset)), 392)

     def test_build_url(self):
-        req = self.request()
-        baseurl = req.base_url()
-        self.compare_urls(req.build_url('view', vid='foo', rql='yo'),
-                          '%sview?vid=foo&rql=yo' % baseurl)
-        self.compare_urls(req.build_url('view', _restpath='task/title/go'),
-                          '%stask/title/go' % baseurl)
-        #self.compare_urls(req.build_url('view', _restpath='/task/title/go'),
-        #                  '%stask/title/go' % baseurl)
-        # empty _restpath should not crash
-        self.compare_urls(req.build_url('view', _restpath=''), baseurl)
-        self.assertNotIn('https', req.build_url('view', vid='foo', rql='yo',
-                                                __secure__=True))
-        try:
-            self.config.global_set_option('https-url', 'https://testing.fr/')
-            self.assertTrue('https', req.build_url('view', vid='foo', rql='yo',
-                                                   __secure__=True))
-            self.compare_urls(req.build_url('view', vid='foo', rql='yo',
-                                            __secure__=True),
-                              '%sview?vid=foo&rql=yo' % req.base_url(secure=True))
-        finally:
-            self.config.global_set_option('https-url', None)
+        with self.admin_access.web_request() as req:
+            baseurl = req.base_url()
+            self.compare_urls(req.build_url('view', vid='foo', rql='yo'),
+                              '%sview?vid=foo&rql=yo' % baseurl)
+            self.compare_urls(req.build_url('view', _restpath='task/title/go'),
+                              '%stask/title/go' % baseurl)
+            #self.compare_urls(req.build_url('view', _restpath='/task/title/go'),
+            #                  '%stask/title/go' % baseurl)
+            # empty _restpath should not crash
+            self.compare_urls(req.build_url('view', _restpath=''), baseurl)
+            self.assertNotIn('https', req.build_url('view', vid='foo', rql='yo',
+                                                    __secure__=True))
+            try:
+                self.config.global_set_option('https-url', 'https://testing.fr/')
+                self.assertTrue('https', req.build_url('view', vid='foo', rql='yo',
+                                                       __secure__=True))
+                self.compare_urls(req.build_url('view', vid='foo', rql='yo',
+                                                __secure__=True),
+                                  '%sview?vid=foo&rql=yo' % req.base_url(secure=True))
+            finally:
+                self.config.global_set_option('https-url', None)


     def test_build(self):
@@ -139,88 +138,92 @@
         rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']],
                        'Any U,L where U is CWUser, U login L',
                        description=[['CWUser', 'String']] * 3)
-        rs.req = self.request()
-        rs.vreg = self.vreg
-        self.assertEqual(rs.limit(2).rows, [[12000, 'adim'], [13000, 'syt']])
-        rs2 = rs.limit(2, offset=1)
-        self.assertEqual(rs2.rows, [[13000, 'syt'], [14000, 'nico']])
-        self.assertEqual(rs2.get_entity(0, 0).cw_row, 0)
-        self.assertEqual(rs.limit(2, offset=2).rows, [[14000, 'nico']])
-        self.assertEqual(rs.limit(2, offset=3).rows, [])
+        with self.admin_access.web_request() as req:
+            rs.req = req
+            rs.vreg = self.vreg
+            self.assertEqual(rs.limit(2).rows, [[12000, 'adim'], [13000, 'syt']])
+            rs2 = rs.limit(2, offset=1)
+            self.assertEqual(rs2.rows, [[13000, 'syt'], [14000, 'nico']])
+            self.assertEqual(rs2.get_entity(0, 0).cw_row, 0)
+            self.assertEqual(rs.limit(2, offset=2).rows, [[14000, 'nico']])
+            self.assertEqual(rs.limit(2, offset=3).rows, [])

     def test_limit_2(self):
-        req = self.request()
-        # drop user from cache for the sake of this test
-        req.drop_entity_cache(req.user.eid)
-        rs = req.execute('Any E,U WHERE E is CWEType, E created_by U')
-        # get entity on row 9. This will fill its created_by relation cache,
-        # with cwuser on row 9 as well
-        e1 = rs.get_entity(9, 0)
-        # get entity on row 10. This will fill its created_by relation cache,
-        # with cwuser built on row 9
-        e2 = rs.get_entity(10, 0)
-        # limit result set from row 10
-        rs.limit(1, 10, inplace=True)
-        # get back eid
-        e = rs.get_entity(0, 0)
-        self.assertTrue(e2 is e)
-        # rs.limit has properly removed cwuser for request cache, but it's
-        # still referenced by e/e2 relation cache
-        u = e.created_by[0]
-        # now ensure this doesn't trigger IndexError because cwuser.cw_row is 9
-        # while now rset has only one row
-        u.cw_rset[u.cw_row]
+        with self.admin_access.web_request() as req:
+            # drop user from cache for the sake of this test
+            req.drop_entity_cache(req.user.eid)
+            rs = req.execute('Any E,U WHERE E is CWEType, E created_by U')
+            # get entity on row 9. This will fill its created_by relation cache,
+            # with cwuser on row 9 as well
+            e1 = rs.get_entity(9, 0)
+            # get entity on row 10. This will fill its created_by relation cache,
+            # with cwuser built on row 9
+            e2 = rs.get_entity(10, 0)
+            # limit result set from row 10
+            rs.limit(1, 10, inplace=True)
+            # get back eid
+            e = rs.get_entity(0, 0)
+            self.assertTrue(e2 is e)
+            # rs.limit has properly removed cwuser for request cache, but it's
+            # still referenced by e/e2 relation cache
+            u = e.created_by[0]
+            # now ensure this doesn't trigger IndexError because cwuser.cw_row is 9
+            # while now rset has only one row
+            u.cw_rset[u.cw_row]

     def test_filter(self):
         rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']],
                        'Any U,L where U is CWUser, U login L',
                        description=[['CWUser', 'String']] * 3)
-        rs.req = self.request()
-        rs.vreg = self.vreg
-        def test_filter(entity):
-            return entity.login != 'nico'
+        with self.admin_access.web_request() as req:
+            rs.req = req
+            rs.vreg = self.vreg
+            def test_filter(entity):
+                return entity.login != 'nico'

-        rs2 = rs.filtered_rset(test_filter)
-        self.assertEqual(len(rs2), 2)
-        self.assertEqual([login for _, login in rs2], ['adim', 'syt'])
-        self.assertEqual(rs2.description, rs.description[1:])
+            rs2 = rs.filtered_rset(test_filter)
+            self.assertEqual(len(rs2), 2)
+            self.assertEqual([login for _, login in rs2], ['adim', 'syt'])
+            self.assertEqual(rs2.description, rs.description[1:])

     def test_transform(self):
         rs = ResultSet([[12, 'adim'], [13, 'syt'], [14, 'nico']],
                        'Any U,L where U is CWUser, U login L',
                        description=[['CWUser', 'String']] * 3)
-        rs.req = self.request()
-        def test_transform(row, desc):
-            return row[1:], desc[1:]
-        rs2 = rs.transformed_rset(test_transform)
+        with self.admin_access.web_request() as req:
+            rs.req = req
+            def test_transform(row, desc):
+                return row[1:], desc[1:]
+            rs2 = rs.transformed_rset(test_transform)

-        self.assertEqual(len(rs2), 3)
-        self.assertEqual(list(rs2), [['adim'],['syt'],['nico']])
+            self.assertEqual(len(rs2), 3)
+            self.assertEqual(list(rs2), [['adim'],['syt'],['nico']])

     def test_sort(self):
         rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']],
                        'Any U,L where U is CWUser, U login L',
                        description=[['CWUser', 'String']] * 3)
-        rs.req = self.request()
-        rs.vreg = self.vreg
+        with self.admin_access.web_request() as req:
+            rs.req = req
+            rs.vreg = self.vreg

-        rs2 = rs.sorted_rset(lambda e:e.cw_attr_cache['login'])
-        self.assertEqual(len(rs2), 3)
-        self.assertEqual([login for _, login in rs2], ['adim', 'nico', 'syt'])
-        # make sure rs is unchanged
-        self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico'])
+            rs2 = rs.sorted_rset(lambda e:e.cw_attr_cache['login'])
+            self.assertEqual(len(rs2), 3)
+            self.assertEqual([login for _, login in rs2], ['adim', 'nico', 'syt'])
+            # make sure rs is unchanged
+            self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico'])

-        rs2 = rs.sorted_rset(lambda e:e.cw_attr_cache['login'], reverse=True)
-        self.assertEqual(len(rs2), 3)
-        self.assertEqual([login for _, login in rs2], ['syt', 'nico', 'adim'])
-        # make sure rs is unchanged
-        self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico'])
+            rs2 = rs.sorted_rset(lambda e:e.cw_attr_cache['login'], reverse=True)
+            self.assertEqual(len(rs2), 3)
+            self.assertEqual([login for _, login in rs2], ['syt', 'nico', 'adim'])
+            # make sure rs is unchanged
+            self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico'])

-        rs3 = rs.sorted_rset(lambda row: row[1], col=-1)
-        self.assertEqual(len(rs3), 3)
-        self.assertEqual([login for _, login in rs3], ['adim', 'nico', 'syt'])
-        # make sure rs is unchanged
-        self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico'])
+            rs3 = rs.sorted_rset(lambda row: row[1], col=-1)
+            self.assertEqual(len(rs3), 3)
+            self.assertEqual([login for _, login in rs3], ['adim', 'nico', 'syt'])
+            # make sure rs is unchanged
+            self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico'])

     def test_split(self):
         rs = ResultSet([[12000, 'adim', u'Adim chez les pinguins'],
                         [12000, 'adim', u'Jardiner facile'],
                         [13000, 'syt',  u'Le carrelage en 42 leçons'],
                         [14000, 'nico', u'La tarte tatin en 15 minutes'],
                         [14000, 'nico', u"L'épluchage du castor commun"]],
@@ -231,40 +234,41 @@
                        'Any U, L, T WHERE U is CWUser, U login L,'\
                        'D created_by U, D title T',
                        description=[['CWUser', 'String', 'String']] * 5)
-        rs.req = self.request()
-        rs.vreg = self.vreg
-        rsets = rs.split_rset(lambda e:e.cw_attr_cache['login'])
-        self.assertEqual(len(rsets), 3)
-        self.assertEqual([login for _, login,_ in rsets[0]], ['adim', 'adim'])
-        self.assertEqual([login for _, login,_ in rsets[1]], ['syt'])
-        self.assertEqual([login for _, login,_ in rsets[2]], ['nico', 'nico'])
-        # make sure rs is unchanged
-        self.assertEqual([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico'])
+        with self.admin_access.web_request() as req:
+            rs.req = req
+            rs.vreg = self.vreg
+            rsets = rs.split_rset(lambda e:e.cw_attr_cache['login'])
+            self.assertEqual(len(rsets), 3)
+            self.assertEqual([login for _, login,_ in rsets[0]], ['adim', 'adim'])
+            self.assertEqual([login for _, login,_ in rsets[1]], ['syt'])
+            self.assertEqual([login for _, login,_ in rsets[2]], ['nico', 'nico'])
+            # make sure rs is unchanged
+            self.assertEqual([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico'])

-        rsets = rs.split_rset(lambda e:e.cw_attr_cache['login'], return_dict=True)
-        self.assertEqual(len(rsets), 3)
-        self.assertEqual([login for _, login,_ in rsets['nico']], ['nico', 'nico'])
-        self.assertEqual([login for _, login,_ in rsets['adim']], ['adim', 'adim'])
-        self.assertEqual([login for _, login,_ in rsets['syt']], ['syt'])
-        # make sure rs is unchanged
-        self.assertEqual([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico'])
+            rsets = rs.split_rset(lambda e:e.cw_attr_cache['login'], return_dict=True)
+            self.assertEqual(len(rsets), 3)
+            self.assertEqual([login for _, login,_ in rsets['nico']], ['nico', 'nico'])
+            self.assertEqual([login for _, login,_ in rsets['adim']], ['adim', 'adim'])
+            self.assertEqual([login for _, login,_ in rsets['syt']], ['syt'])
+            # make sure rs is unchanged
+            self.assertEqual([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico'])

-        rsets = rs.split_rset(lambda s: s.count('d'), col=2)
-        self.assertEqual(len(rsets), 2)
-        self.assertEqual([title for _, _, title in rsets[0]],
-                         [u"Adim chez les pinguins",
-                          u"Jardiner facile",
-                          u"L'épluchage du castor commun",])
-        self.assertEqual([title for _, _, title in rsets[1]],
-                         [u"Le carrelage en 42 leçons",
-                          u"La tarte tatin en 15 minutes",])
-        # make sure rs is unchanged
-        self.assertEqual([title for _, _, title in rs],
-                         [u'Adim chez les pinguins',
-                          u'Jardiner facile',
-                          u'Le carrelage en 42 leçons',
-                          u'La tarte tatin en 15 minutes',
-                          u"L'épluchage du castor commun"])
+            rsets = rs.split_rset(lambda s: s.count('d'), col=2)
+            self.assertEqual(len(rsets), 2)
+            self.assertEqual([title for _, _, title in rsets[0]],
+                             [u"Adim chez les pinguins",
+                              u"Jardiner facile",
+                              u"L'épluchage du castor commun",])
+            self.assertEqual([title for _, _, title in rsets[1]],
+                             [u"Le carrelage en 42 leçons",
+                              u"La tarte tatin en 15 minutes",])
+            # make sure rs is unchanged
+            self.assertEqual([title for _, _, title in rs],
+                             [u'Adim chez les pinguins',
+                              u'Jardiner facile',
+                              u'Le carrelage en 42 leçons',
+                              u'La tarte tatin en 15 minutes',
+                              u"L'épluchage du castor commun"])

     def test_cached_syntax_tree(self):
         """make sure syntax tree is cached"""
@@ -273,265 +277,291 @@
         self.assert_(rqlst1 is rqlst2)

     def test_get_entity_simple(self):
-        self.request().create_entity('CWUser', login=u'adim', upassword='adim',
-                                     surname=u'di mascio', firstname=u'adrien')
-        e = self.execute('Any X,T WHERE X login "adim", X surname T').get_entity(0, 0)
-        self.assertEqual(e.cw_attr_cache['surname'], 'di mascio')
-        self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'firstname')
-        self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'creation_date')
-        self.assertEqual(pprelcachedict(e._cw_related_cache), [])
-        e.complete()
-        self.assertEqual(e.cw_attr_cache['firstname'], 'adrien')
-        self.assertEqual(pprelcachedict(e._cw_related_cache), [])
+        with self.admin_access.web_request() as req:
+            req.create_entity('CWUser', login=u'adim', upassword='adim',
+                              surname=u'di mascio', firstname=u'adrien')
+            req.cnx.drop_entity_cache()
+            e = req.execute('Any X,T WHERE X login "adim", X surname T').get_entity(0, 0)
+            self.assertEqual(e.cw_attr_cache['surname'], 'di mascio')
+            self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'firstname')
+            self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'creation_date')
+            self.assertEqual(pprelcachedict(e._cw_related_cache), [])
+            e.complete()
+            self.assertEqual(e.cw_attr_cache['firstname'], 'adrien')
+            self.assertEqual(pprelcachedict(e._cw_related_cache), [])

     def test_get_entity_advanced(self):
-        self.request().create_entity('Bookmark', title=u'zou', path=u'/view')
-        self.execute('SET X bookmarked_by Y WHERE X is Bookmark, Y login "anon"')
-        rset = self.execute('Any X,Y,XT,YN WHERE X bookmarked_by Y, X title XT, Y login YN')
+        with self.admin_access.web_request() as req:
req.create_entity('Bookmark', title=u'zou', path=u'/view') + req.cnx.drop_entity_cache() + req.execute('SET X bookmarked_by Y WHERE X is Bookmark, Y login "anon"') + rset = req.execute('Any X,Y,XT,YN WHERE X bookmarked_by Y, X title XT, Y login YN') - e = rset.get_entity(0, 0) - self.assertEqual(e.cw_row, 0) - self.assertEqual(e.cw_col, 0) - self.assertEqual(e.cw_attr_cache['title'], 'zou') - self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'path') - self.assertEqual(e.view('text'), 'zou') - self.assertEqual(pprelcachedict(e._cw_related_cache), []) + e = rset.get_entity(0, 0) + self.assertEqual(e.cw_row, 0) + self.assertEqual(e.cw_col, 0) + self.assertEqual(e.cw_attr_cache['title'], 'zou') + self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'path') + self.assertEqual(e.view('text'), 'zou') + self.assertEqual(pprelcachedict(e._cw_related_cache), []) - e = rset.get_entity(0, 1) - self.assertEqual(e.cw_row, 0) - self.assertEqual(e.cw_col, 1) - self.assertEqual(e.cw_attr_cache['login'], 'anon') - self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'firstname') - self.assertEqual(pprelcachedict(e._cw_related_cache), - []) - e.complete() - self.assertEqual(e.cw_attr_cache['firstname'], None) - self.assertEqual(e.view('text'), 'anon') - self.assertEqual(pprelcachedict(e._cw_related_cache), - []) + e = rset.get_entity(0, 1) + self.assertEqual(e.cw_row, 0) + self.assertEqual(e.cw_col, 1) + self.assertEqual(e.cw_attr_cache['login'], 'anon') + self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'firstname') + self.assertEqual(pprelcachedict(e._cw_related_cache), + []) + e.complete() + self.assertEqual(e.cw_attr_cache['firstname'], None) + self.assertEqual(e.view('text'), 'anon') + self.assertEqual(pprelcachedict(e._cw_related_cache), + []) - self.assertRaises(NotAnEntity, rset.get_entity, 0, 2) - self.assertRaises(NotAnEntity, rset.get_entity, 0, 3) + self.assertRaises(NotAnEntity, rset.get_entity, 0, 2) + self.assertRaises(NotAnEntity, rset.get_entity, 0, 3) def test_get_entity_relation_cache_compt(self): - rset = self.execute('Any X,S WHERE X in_state S, X login "anon"') - e = rset.get_entity(0, 0) - seid = self.execute('State X WHERE X name "activated"')[0][0] - # for_user / in_group are prefetched in CWUser __init__, in_state should - # be filed from our query rset - self.assertEqual(pprelcachedict(e._cw_related_cache), - [('in_state_subject', [seid])]) + with self.admin_access.web_request() as req: + rset = req.execute('Any X,S WHERE X in_state S, X login "anon"') + e = rset.get_entity(0, 0) + seid = req.execute('State X WHERE X name "activated"')[0][0] + # for_user / in_group are prefetched in CWUser __init__, in_state should + # be filed from our query rset + self.assertEqual(pprelcachedict(e._cw_related_cache), + [('in_state_subject', [seid])]) def test_get_entity_advanced_prefilled_cache(self): - e = self.request().create_entity('Bookmark', title=u'zou', path=u'path') - self.commit() - rset = self.execute('Any X,U,S,XT,UL,SN WHERE X created_by U, U in_state S, ' - 'X title XT, S name SN, U login UL, X eid %s' % e.eid) - e = rset.get_entity(0, 0) - self.assertEqual(e.cw_attr_cache['title'], 'zou') - self.assertEqual(pprelcachedict(e._cw_related_cache), - [('created_by_subject', [self.user().eid])]) - # first level of recursion - u = e.created_by[0] - self.assertEqual(u.cw_attr_cache['login'], 'admin') - self.assertRaises(KeyError, u.cw_attr_cache.__getitem__, 'firstname') - # second level of recursion - s = u.in_state[0] - self.assertEqual(s.cw_attr_cache['name'], 
'activated') - self.assertRaises(KeyError, s.cw_attr_cache.__getitem__, 'description') + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'zou', path=u'path') + req.cnx.commit() + rset = req.execute('Any X,U,S,XT,UL,SN WHERE X created_by U, U in_state S, ' + 'X title XT, S name SN, U login UL, X eid %s' % e.eid) + e = rset.get_entity(0, 0) + self.assertEqual(e.cw_attr_cache['title'], 'zou') + self.assertEqual(pprelcachedict(e._cw_related_cache), + [('created_by_subject', [req.user.eid])]) + # first level of recursion + u = e.created_by[0] + self.assertEqual(u.cw_attr_cache['login'], 'admin') + self.assertRaises(KeyError, u.cw_attr_cache.__getitem__, 'firstname') + # second level of recursion + s = u.in_state[0] + self.assertEqual(s.cw_attr_cache['name'], 'activated') + self.assertRaises(KeyError, s.cw_attr_cache.__getitem__, 'description') def test_get_entity_cache_with_left_outer_join(self): - eid = self.execute('INSERT CWUser E: E login "joe", E upassword "joe", E in_group G ' - 'WHERE G name "users"')[0][0] - rset = self.execute('Any X,E WHERE X eid %(x)s, X primary_email E?', {'x': eid}) - e = rset.get_entity(0, 0) - # if any of the assertion below fails with a KeyError, the relation is not cached - # related entities should be an empty list - self.assertEqual(e._cw_related_cache['primary_email_subject'][True], ()) - # related rset should be an empty rset - cached = e._cw_related_cache['primary_email_subject'][False] - self.assertIsInstance(cached, ResultSet) - self.assertEqual(cached.rowcount, 0) + with self.admin_access.web_request() as req: + eid = req.execute('INSERT CWUser E: E login "joe", E upassword "joe", E in_group G ' + 'WHERE G name "users"')[0][0] + rset = req.execute('Any X,E WHERE X eid %(x)s, X primary_email E?', {'x': eid}) + e = rset.get_entity(0, 0) + # if any of the assertion below fails with a KeyError, the relation is not cached + # related entities should be an empty list + self.assertEqual(e._cw_related_cache['primary_email_subject'][True], ()) + # related rset should be an empty rset + cached = e._cw_related_cache['primary_email_subject'][False] + self.assertIsInstance(cached, ResultSet) + self.assertEqual(cached.rowcount, 0) def test_get_entity_union(self): - e = self.request().create_entity('Bookmark', title=u'manger', path=u'path') - rset = self.execute('Any X,N ORDERBY N WITH X,N BEING ' - '((Any X,N WHERE X is Bookmark, X title N)' - ' UNION ' - ' (Any X,N WHERE X is CWGroup, X name N))') - expected = (('CWGroup', 'guests'), ('CWGroup', 'managers'), - ('Bookmark', 'manger'), ('CWGroup', 'owners'), - ('CWGroup', 'users')) - for entity in rset.entities(): # test get_entity for each row actually - etype, n = expected[entity.cw_row] - self.assertEqual(entity.cw_etype, etype) - attr = etype == 'Bookmark' and 'title' or 'name' - self.assertEqual(entity.cw_attr_cache[attr], n) + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'manger', path=u'path') + req.cnx.drop_entity_cache() + rset = req.execute('Any X,N ORDERBY N WITH X,N BEING ' + '((Any X,N WHERE X is Bookmark, X title N)' + ' UNION ' + ' (Any X,N WHERE X is CWGroup, X name N))') + expected = (('CWGroup', 'guests'), ('CWGroup', 'managers'), + ('Bookmark', 'manger'), ('CWGroup', 'owners'), + ('CWGroup', 'users')) + for entity in rset.entities(): # test get_entity for each row actually + etype, n = expected[entity.cw_row] + self.assertEqual(entity.cw_etype, etype) + attr = etype == 'Bookmark' and 'title' or 'name' + 
self.assertEqual(entity.cw_attr_cache[attr], n) def test_one(self): - self.request().create_entity('CWUser', login=u'cdevienne', - upassword=u'cdevienne', - surname=u'de Vienne', - firstname=u'Christophe') - e = self.execute('Any X WHERE X login "cdevienne"').one() - - self.assertEqual(e.surname, u'de Vienne') + with self.admin_access.web_request() as req: + req.create_entity('CWUser', login=u'cdevienne', + upassword=u'cdevienne', + surname=u'de Vienne', + firstname=u'Christophe') + e = req.execute('Any X WHERE X login "cdevienne"').one() - e = self.execute( - 'Any X, N WHERE X login "cdevienne", X surname N').one() - self.assertEqual(e.surname, u'de Vienne') + self.assertEqual(e.surname, u'de Vienne') - e = self.execute( - 'Any N, X WHERE X login "cdevienne", X surname N').one(col=1) - self.assertEqual(e.surname, u'de Vienne') + e = req.execute( + 'Any X, N WHERE X login "cdevienne", X surname N').one() + self.assertEqual(e.surname, u'de Vienne') + + e = req.execute( + 'Any N, X WHERE X login "cdevienne", X surname N').one(col=1) + self.assertEqual(e.surname, u'de Vienne') def test_one_no_rows(self): - with self.assertRaises(NoResultError): - self.execute('Any X WHERE X login "patanok"').one() + with self.admin_access.web_request() as req: + with self.assertRaises(NoResultError): + req.execute('Any X WHERE X login "patanok"').one() def test_one_multiple_rows(self): - self.request().create_entity( - 'CWUser', login=u'cdevienne', upassword=u'cdevienne', - surname=u'de Vienne', firstname=u'Christophe') + with self.admin_access.web_request() as req: + req.create_entity( + 'CWUser', login=u'cdevienne', upassword=u'cdevienne', + surname=u'de Vienne', firstname=u'Christophe') - self.request().create_entity( - 'CWUser', login=u'adim', upassword='adim', surname=u'di mascio', - firstname=u'adrien') + req.create_entity( + 'CWUser', login=u'adim', upassword='adim', surname=u'di mascio', + firstname=u'adrien') - with self.assertRaises(MultipleResultsError): - self.execute('Any X WHERE X is CWUser').one() + with self.assertRaises(MultipleResultsError): + req.execute('Any X WHERE X is CWUser').one() def test_related_entity_optional(self): - e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path') - rset = self.execute('Any B,U,L WHERE B bookmarked_by U?, U login L') - entity, rtype = rset.related_entity(0, 2) - self.assertEqual(entity, None) - self.assertEqual(rtype, None) + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') + rset = req.execute('Any B,U,L WHERE B bookmarked_by U?, U login L') + entity, rtype = rset.related_entity(0, 2) + self.assertEqual(entity, None) + self.assertEqual(rtype, None) def test_related_entity_union_subquery_1(self): - e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path') - rset = self.execute('Any X,N ORDERBY N WITH X,N BEING ' - '((Any X,N WHERE X is CWGroup, X name N)' - ' UNION ' - ' (Any X,N WHERE X is Bookmark, X title N))') - entity, rtype = rset.related_entity(0, 1) - self.assertEqual(entity.eid, e.eid) - self.assertEqual(rtype, 'title') - self.assertEqual(entity.title, 'aaaa') - entity, rtype = rset.related_entity(1, 1) - self.assertEqual(entity.cw_etype, 'CWGroup') - self.assertEqual(rtype, 'name') - self.assertEqual(entity.name, 'guests') + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') + rset = req.execute('Any X,N ORDERBY N WITH X,N BEING ' + '((Any X,N WHERE X is CWGroup, X name N)' + ' UNION ' + ' (Any 
X,N WHERE X is Bookmark, X title N))') + entity, rtype = rset.related_entity(0, 1) + self.assertEqual(entity.eid, e.eid) + self.assertEqual(rtype, 'title') + self.assertEqual(entity.title, 'aaaa') + entity, rtype = rset.related_entity(1, 1) + self.assertEqual(entity.cw_etype, 'CWGroup') + self.assertEqual(rtype, 'name') + self.assertEqual(entity.name, 'guests') def test_related_entity_union_subquery_2(self): - e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path') - rset = self.execute('Any X,N ORDERBY N WHERE X is Bookmark WITH X,N BEING ' - '((Any X,N WHERE X is CWGroup, X name N)' - ' UNION ' - ' (Any X,N WHERE X is Bookmark, X title N))') - entity, rtype = rset.related_entity(0, 1) - self.assertEqual(entity.eid, e.eid) - self.assertEqual(rtype, 'title') - self.assertEqual(entity.title, 'aaaa') + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') + rset = req.execute('Any X,N ORDERBY N WHERE X is Bookmark WITH X,N BEING ' + '((Any X,N WHERE X is CWGroup, X name N)' + ' UNION ' + ' (Any X,N WHERE X is Bookmark, X title N))') + entity, rtype = rset.related_entity(0, 1) + self.assertEqual(entity.eid, e.eid) + self.assertEqual(rtype, 'title') + self.assertEqual(entity.title, 'aaaa') def test_related_entity_union_subquery_3(self): - e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path') - rset = self.execute('Any X,N ORDERBY N WITH N,X BEING ' - '((Any N,X WHERE X is CWGroup, X name N)' - ' UNION ' - ' (Any N,X WHERE X is Bookmark, X title N))') - entity, rtype = rset.related_entity(0, 1) - self.assertEqual(entity.eid, e.eid) - self.assertEqual(rtype, 'title') - self.assertEqual(entity.title, 'aaaa') + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') + rset = req.execute('Any X,N ORDERBY N WITH N,X BEING ' + '((Any N,X WHERE X is CWGroup, X name N)' + ' UNION ' + ' (Any N,X WHERE X is Bookmark, X title N))') + entity, rtype = rset.related_entity(0, 1) + self.assertEqual(entity.eid, e.eid) + self.assertEqual(rtype, 'title') + self.assertEqual(entity.title, 'aaaa') def test_related_entity_union_subquery_4(self): - e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path') - rset = self.execute('Any X,X, N ORDERBY N WITH X,N BEING ' - '((Any X,N WHERE X is CWGroup, X name N)' - ' UNION ' - ' (Any X,N WHERE X is Bookmark, X title N))') - entity, rtype = rset.related_entity(0, 2) - self.assertEqual(entity.eid, e.eid) - self.assertEqual(rtype, 'title') - self.assertEqual(entity.title, 'aaaa') + with self.admin_access.web_request() as req: + e = req.create_entity('Bookmark', title=u'aaaa', path=u'path') + rset = req.execute('Any X,X, N ORDERBY N WITH X,N BEING ' + '((Any X,N WHERE X is CWGroup, X name N)' + ' UNION ' + ' (Any X,N WHERE X is Bookmark, X title N))') + entity, rtype = rset.related_entity(0, 2) + self.assertEqual(entity.eid, e.eid) + self.assertEqual(rtype, 'title') + self.assertEqual(entity.title, 'aaaa') def test_related_entity_trap_subquery(self): - req = self.request() - req.create_entity('Bookmark', title=u'test bookmark', path=u'') - self.execute('SET B bookmarked_by U WHERE U login "admin"') - rset = self.execute('Any B,T,L WHERE B bookmarked_by U, U login L ' - 'WITH B,T BEING (Any B,T WHERE B is Bookmark, B title T)') - rset.related_entity(0, 2) + with self.admin_access.web_request() as req: + req.create_entity('Bookmark', title=u'test bookmark', path=u'') + req.execute('SET B bookmarked_by U WHERE U login 
"admin"') + rset = req.execute('Any B,T,L WHERE B bookmarked_by U, U login L ' + 'WITH B,T BEING (Any B,T WHERE B is Bookmark, B title T)') + rset.related_entity(0, 2) def test_related_entity_subquery_outerjoin(self): - rset = self.execute('Any X,S,L WHERE X in_state S ' - 'WITH X, L BEING (Any X,MAX(L) GROUPBY X ' - 'WHERE X is CWUser, T? wf_info_for X, T creation_date L)') - self.assertEqual(len(rset), 2) - rset.related_entity(0, 1) - rset.related_entity(0, 2) + with self.admin_access.web_request() as req: + rset = req.execute('Any X,S,L WHERE X in_state S ' + 'WITH X, L BEING (Any X,MAX(L) GROUPBY X ' + 'WHERE X is CWUser, T? wf_info_for X, T creation_date L)') + self.assertEqual(len(rset), 2) + rset.related_entity(0, 1) + rset.related_entity(0, 2) def test_entities(self): - rset = self.execute('Any U,G WHERE U in_group G') - # make sure we have at least one element - self.assertTrue(rset) - self.assertEqual(set(e.e_schema.type for e in rset.entities(0)), - set(['CWUser',])) - self.assertEqual(set(e.e_schema.type for e in rset.entities(1)), - set(['CWGroup',])) + with self.admin_access.web_request() as req: + rset = req.execute('Any U,G WHERE U in_group G') + # make sure we have at least one element + self.assertTrue(rset) + self.assertEqual(set(e.e_schema.type for e in rset.entities(0)), + set(['CWUser',])) + self.assertEqual(set(e.e_schema.type for e in rset.entities(1)), + set(['CWGroup',])) def test_iter_rows_with_entities(self): - rset = self.execute('Any U,UN,G,GN WHERE U in_group G, U login UN, G name GN') - # make sure we have at least one element - self.assertTrue(rset) - out = list(rset.iter_rows_with_entities())[0] - self.assertEqual( out[0].login, out[1] ) - self.assertEqual( out[2].name, out[3] ) + with self.admin_access.web_request() as req: + rset = req.execute('Any U,UN,G,GN WHERE U in_group G, U login UN, G name GN') + # make sure we have at least one element + self.assertTrue(rset) + out = list(rset.iter_rows_with_entities())[0] + self.assertEqual( out[0].login, out[1] ) + self.assertEqual( out[2].name, out[3] ) def test_printable_rql(self): - rset = self.execute(u'CWEType X WHERE X final FALSE') - self.assertEqual(rset.printable_rql(), - 'Any X WHERE X final FALSE, X is CWEType') + with self.admin_access.web_request() as req: + rset = req.execute(u'CWEType X WHERE X final FALSE') + self.assertEqual(rset.printable_rql(), + 'Any X WHERE X final FALSE, X is CWEType') def test_searched_text(self): - rset = self.execute(u'Any X WHERE X has_text "foobar"') - self.assertEqual(rset.searched_text(), 'foobar') - rset = self.execute(u'Any X WHERE X has_text %(text)s', {'text' : 'foo'}) - self.assertEqual(rset.searched_text(), 'foo') + with self.admin_access.web_request() as req: + rset = req.execute(u'Any X WHERE X has_text "foobar"') + self.assertEqual(rset.searched_text(), 'foobar') + rset = req.execute(u'Any X WHERE X has_text %(text)s', {'text' : 'foo'}) + self.assertEqual(rset.searched_text(), 'foo') def test_union_limited_rql(self): - rset = self.execute('(Any X,N WHERE X is Bookmark, X title N)' - ' UNION ' - '(Any X,N WHERE X is CWGroup, X name N)') - rset.limit(2, 10, inplace=True) - self.assertEqual(rset.limited_rql(), - 'Any A,B LIMIT 2 OFFSET 10 ' - 'WITH A,B BEING (' - '(Any X,N WHERE X is Bookmark, X title N) ' - 'UNION ' - '(Any X,N WHERE X is CWGroup, X name N)' - ')') + with self.admin_access.web_request() as req: + rset = req.execute('(Any X,N WHERE X is Bookmark, X title N)' + ' UNION ' + '(Any X,N WHERE X is CWGroup, X name N)') + rset.limit(2, 10, 
inplace=True) + self.assertEqual(rset.limited_rql(), + 'Any A,B LIMIT 2 OFFSET 10 ' + 'WITH A,B BEING (' + '(Any X,N WHERE X is Bookmark, X title N) ' + 'UNION ' + '(Any X,N WHERE X is CWGroup, X name N)' + ')') def test_count_users_by_date(self): - rset = self.execute('Any D, COUNT(U) GROUPBY D WHERE U is CWUser, U creation_date D') - self.assertEqual(rset.related_entity(0,0), (None, None)) + with self.admin_access.web_request() as req: + rset = req.execute('Any D, COUNT(U) GROUPBY D WHERE U is CWUser, U creation_date D') + self.assertEqual(rset.related_entity(0,0), (None, None)) def test_str(self): - rset = self.execute('(Any X,N WHERE X is CWGroup, X name N)') - self.assertIsInstance(str(rset), basestring) - self.assertEqual(len(str(rset).splitlines()), 1) + with self.admin_access.web_request() as req: + rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)') + self.assertIsInstance(str(rset), basestring) + self.assertEqual(len(str(rset).splitlines()), 1) def test_repr(self): - rset = self.execute('(Any X,N WHERE X is CWGroup, X name N)') - self.assertIsInstance(repr(rset), basestring) - self.assertTrue(len(repr(rset).splitlines()) > 1) + with self.admin_access.web_request() as req: + rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)') + self.assertIsInstance(repr(rset), basestring) + self.assertTrue(len(repr(rset).splitlines()) > 1) - rset = self.execute('(Any X WHERE X is CWGroup, X name "managers")') - self.assertIsInstance(str(rset), basestring) - self.assertEqual(len(str(rset).splitlines()), 1) + rset = req.execute('(Any X WHERE X is CWGroup, X name "managers")') + self.assertIsInstance(str(rset), basestring) + self.assertEqual(len(str(rset).splitlines()), 1) if __name__ == '__main__': diff -r 84738d495ffd -r 793377697c81 test/unittest_schema.py --- a/test/unittest_schema.py Wed Sep 24 17:35:59 2014 +0200 +++ b/test/unittest_schema.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
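Editorial note: the mechanical change repeated throughout the test hunks above is the 3.19 connection API migration: the deprecated implicit helpers (`self.request()`, `self.execute()`, `self.session`) are replaced by explicit, scoped connections obtained from `self.admin_access`. A minimal sketch of the new idiom, assuming only names visible in the hunks above (the test case and RQL are illustrative, not taken from this patch):

.. sourcecode:: python

    from cubicweb.devtools.testlib import CubicWebTC

    class MigrationSketchTC(CubicWebTC):  # hypothetical test case

        def test_web_side(self):
            # web_request() yields a web request bound to a client connection
            with self.admin_access.web_request() as req:
                rset = req.execute('Any X WHERE X is CWUser')
                self.assertTrue(rset)

        def test_repo_side(self):
            # repo_cnx() yields a repository connection; commit explicitly
            with self.admin_access.repo_cnx() as cnx:
                cnx.execute('SET X surname "doe" WHERE X login "anon"')  # illustrative RQL
                cnx.commit()

Entity caches now live on the connection, which is why several migrated tests above call `req.cnx.drop_entity_cache()` right after creating entities.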
@@ -168,7 +168,7 @@ 'CWUniqueTogetherConstraint', 'CWUser', 'ExternalUri', 'File', 'Float', 'Int', 'Interval', 'Note', 'Password', 'Personne', 'Produit', - 'RQLExpression', + 'RQLExpression', 'Reference', 'Service', 'Societe', 'State', 'StateFull', 'String', 'SubNote', 'SubWorkflowExitPoint', 'Tag', 'TZDatetime', 'TZTime', 'Time', 'Transition', 'TrInfo', 'Usine', @@ -188,7 +188,7 @@ 'data', 'data_encoding', 'data_format', 'data_name', 'default_workflow', 'defaultval', 'delete_permission', 'description', 'description_format', 'destination_state', 'dirige', - 'ecrit_par', 'eid', 'end_timestamp', 'evaluee', 'expression', 'exprtype', 'extra_props', + 'ean', 'ecrit_par', 'eid', 'end_timestamp', 'evaluee', 'expression', 'exprtype', 'extra_props', 'fabrique_par', 'final', 'firstname', 'for_user', 'fournit', 'from_entity', 'from_state', 'fulltext_container', 'fulltextindexed', @@ -248,8 +248,8 @@ def test_fulltext_container(self): schema = loader.load(config) - self.assertTrue('has_text' in schema['CWUser'].subject_relations()) - self.assertFalse('has_text' in schema['EmailAddress'].subject_relations()) + self.assertIn('has_text', schema['CWUser'].subject_relations()) + self.assertNotIn('has_text', schema['EmailAddress'].subject_relations()) def test_permission_settings(self): schema = loader.load(config) @@ -329,6 +329,7 @@ self.assertEqual(normalize_expression('X bla Y,Y blur Z , Z zigoulou X '), 'X bla Y, Y blur Z, Z zigoulou X') + class RQLExpressionTC(TestCase): def test_comparison(self): self.assertEqual(ERQLExpression('X is CWUser', 'X', 0), @@ -336,6 +337,7 @@ self.assertNotEqual(ERQLExpression('X is CWUser', 'X', 0), ERQLExpression('X is CWGroup', 'X', 0)) + class GuessRrqlExprMainVarsTC(TestCase): def test_exists(self): mainvars = guess_rrqlexpr_mainvars(normalize_expression('NOT EXISTS(O team_competition C, C level < 3, C concerns S)')) @@ -345,15 +347,112 @@ class RQLConstraintTC(CubicWebTC): def test_user_constraint(self): cstr = RQLConstraint('U identity O') - anoneid = self.execute('Any X WHERE X login "anon"')[0][0] - self.assertRaises(ValidationError, cstr.repo_check, self.session, 1, 'rel', anoneid) - self.assertEqual(cstr.repo_check(self.session, 1, self.session.user.eid), - None) # no validation error, constraint checked + with self.admin_access.repo_cnx() as cnx: + anoneid = cnx.execute('Any X WHERE X login "anon"')[0][0] + self.assertRaises(ValidationError, + cstr.repo_check, cnx, 1, 'rel', anoneid) + self.assertEqual(cstr.repo_check(cnx, 1, cnx.user.eid), + None) # no validation error, constraint checked + class WorkflowShemaTC(CubicWebTC): def test_trinfo_default_format(self): - tr = self.request().user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') - self.assertEqual(tr.comment_format, 'text/plain') + with self.admin_access.web_request() as req: + tr = req.user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') + self.assertEqual(tr.comment_format, 'text/plain') + + +class CompositeSchemaTC(CubicWebTC): + composites = { + 'BaseTransition': [('condition', 'BaseTransition', 'RQLExpression', 'subject')], + 'CWAttribute': [('add_permission', 'CWAttribute', 'RQLExpression', 'subject'), + ('constrained_by', 'CWAttribute', 'CWConstraint', 'subject'), + ('read_permission', 'CWAttribute', 'RQLExpression', 'subject'), + ('update_permission', 'CWAttribute', 'RQLExpression', 'subject')], + 'CWEType': [('add_permission', 'CWEType', 'RQLExpression', 'subject'), + ('constraint_of', 'CWUniqueTogetherConstraint', 'CWEType', 'object'), + ('cw_schema', 
'CWSourceSchemaConfig', 'CWEType', 'object'), + ('delete_permission', 'CWEType', 'RQLExpression', 'subject'), + ('from_entity', 'CWAttribute', 'CWEType', 'object'), + ('from_entity', 'CWRelation', 'CWEType', 'object'), + ('read_permission', 'CWEType', 'RQLExpression', 'subject'), + ('to_entity', 'CWAttribute', 'CWEType', 'object'), + ('to_entity', 'CWRelation', 'CWEType', 'object'), + ('update_permission', 'CWEType', 'RQLExpression', 'subject')], + 'CWRType': [('cw_schema', 'CWSourceSchemaConfig', 'CWRType', 'object'), + ('relation_type', 'CWAttribute', 'CWRType', 'object'), + ('relation_type', 'CWRelation', 'CWRType', 'object')], + 'CWRelation': [('add_permission', 'CWRelation', 'RQLExpression', 'subject'), + ('constrained_by', 'CWRelation', 'CWConstraint', 'subject'), + ('cw_schema', 'CWSourceSchemaConfig', 'CWRelation', 'object'), + ('delete_permission', 'CWRelation', 'RQLExpression', 'subject'), + ('read_permission', 'CWRelation', 'RQLExpression', 'subject')], + 'CWSource': [('cw_for_source', 'CWSourceSchemaConfig', 'CWSource', 'object'), + ('cw_host_config_of', 'CWSourceHostConfig', 'CWSource', 'object'), + ('cw_import_of', 'CWDataImport', 'CWSource', 'object'), + ('cw_source', 'Ami', 'CWSource', 'object'), + ('cw_source', 'BaseTransition', 'CWSource', 'object'), + ('cw_source', 'Bookmark', 'CWSource', 'object'), + ('cw_source', 'CWAttribute', 'CWSource', 'object'), + ('cw_source', 'CWCache', 'CWSource', 'object'), + ('cw_source', 'CWConstraint', 'CWSource', 'object'), + ('cw_source', 'CWConstraintType', 'CWSource', 'object'), + ('cw_source', 'CWDataImport', 'CWSource', 'object'), + ('cw_source', 'CWEType', 'CWSource', 'object'), + ('cw_source', 'CWGroup', 'CWSource', 'object'), + ('cw_source', 'CWPermission', 'CWSource', 'object'), + ('cw_source', 'CWProperty', 'CWSource', 'object'), + ('cw_source', 'CWRType', 'CWSource', 'object'), + ('cw_source', 'CWRelation', 'CWSource', 'object'), + ('cw_source', 'CWSource', 'CWSource', 'object'), + ('cw_source', 'CWSourceHostConfig', 'CWSource', 'object'), + ('cw_source', 'CWSourceSchemaConfig', 'CWSource', 'object'), + ('cw_source', 'CWUniqueTogetherConstraint', 'CWSource', 'object'), + ('cw_source', 'CWUser', 'CWSource', 'object'), + ('cw_source', 'Card', 'CWSource', 'object'), + ('cw_source', 'EmailAddress', 'CWSource', 'object'), + ('cw_source', 'ExternalUri', 'CWSource', 'object'), + ('cw_source', 'File', 'CWSource', 'object'), + ('cw_source', 'Note', 'CWSource', 'object'), + ('cw_source', 'Personne', 'CWSource', 'object'), + ('cw_source', 'Produit', 'CWSource', 'object'), + ('cw_source', 'RQLExpression', 'CWSource', 'object'), + ('cw_source', 'Reference', 'CWSource', 'object'), + ('cw_source', 'Service', 'CWSource', 'object'), + ('cw_source', 'Societe', 'CWSource', 'object'), + ('cw_source', 'State', 'CWSource', 'object'), + ('cw_source', 'StateFull', 'CWSource', 'object'), + ('cw_source', 'SubNote', 'CWSource', 'object'), + ('cw_source', 'SubWorkflowExitPoint', 'CWSource', 'object'), + ('cw_source', 'Tag', 'CWSource', 'object'), + ('cw_source', 'TrInfo', 'CWSource', 'object'), + ('cw_source', 'Transition', 'CWSource', 'object'), + ('cw_source', 'Usine', 'CWSource', 'object'), + ('cw_source', 'Workflow', 'CWSource', 'object'), + ('cw_source', 'WorkflowTransition', 'CWSource', 'object')], + 'CWUser': [('for_user', 'CWProperty', 'CWUser', 'object'), + ('use_email', 'CWUser', 'EmailAddress', 'subject'), + ('wf_info_for', 'TrInfo', 'CWUser', 'object')], + 'StateFull': [('wf_info_for', 'TrInfo', 'StateFull', 'object')], + 'Transition': 
[('condition', 'Transition', 'RQLExpression', 'subject')],
+        'Workflow': [('state_of', 'State', 'Workflow', 'object'),
+                     ('transition_of', 'BaseTransition', 'Workflow', 'object'),
+                     ('transition_of', 'Transition', 'Workflow', 'object'),
+                     ('transition_of', 'WorkflowTransition', 'Workflow', 'object')],
+        'WorkflowTransition': [('condition', 'WorkflowTransition', 'RQLExpression', 'subject'),
+                               ('subworkflow_exit', 'WorkflowTransition', 'SubWorkflowExitPoint', 'subject')]
+        }
+
+    def test_composite_entities(self):
+        schema = self.vreg.schema
+        self.assertEqual(sorted(self.composites),
+                         [eschema.type for eschema in sorted(schema.entities())
+                          if eschema.is_composite])
+        for etype in self.composites:
+            self.set_description('composite rdefs for %s' % etype)
+            yield self.assertEqual, self.composites[etype], \
+                  sorted([(r.rtype.type, r.subject.type, r.object.type, role)
+                          for r, role in sorted(schema[etype].composite_rdef_roles)])

 if __name__ == '__main__':
     unittest_main()
diff -r 84738d495ffd -r 793377697c81 test/unittest_utils.py
--- a/test/unittest_utils.py Wed Sep 24 17:35:59 2014 +0200
+++ b/test/unittest_utils.py Wed Sep 24 18:04:30 2014 +0200
@@ -229,11 +229,11 @@
 class HTMLHeadTC(CubicWebTC):

     def htmlhead(self, datadir_url):
-        req = self.request()
-        base_url = u'http://test.fr/data/'
-        req.datadir_url = base_url
-        head = HTMLHead(req)
-        return head
+        with self.admin_access.web_request() as req:
+            base_url = u'http://test.fr/data/'
+            req.datadir_url = base_url
+            head = HTMLHead(req)
+            return head

     def test_concat_urls(self):
         base_url = u'http://test.fr/data/'
diff -r 84738d495ffd -r 793377697c81 test/unittest_vregistry.py
--- a/test/unittest_vregistry.py Wed Sep 24 17:35:59 2014 +0200
+++ b/test/unittest_vregistry.py Wed Sep 24 18:04:30 2014 +0200
@@ -74,7 +74,7 @@

     def test_properties(self):
         self.vreg.reset()
-        self.assertFalse('system.version.cubicweb' in self.vreg['propertydefs'])
+        self.assertNotIn('system.version.cubicweb', self.vreg['propertydefs'])
         self.assertTrue(self.vreg.property_info('system.version.cubicweb'))
         self.assertRaises(UnknownProperty, self.vreg.property_info, 'a.non.existent.key')

diff -r 84738d495ffd -r 793377697c81 transaction.py
--- a/transaction.py Wed Sep 24 17:35:59 2014 +0200
+++ b/transaction.py Wed Sep 24 18:04:30 2014 +0200
@@ -53,7 +53,17 @@
         self.datetime = time
         self.user_eid = ueid
         # should be set by the dbapi connection
-        self.req = None
+        self.req = None  # old style
+        self.cnx = None  # new style
+
+    def _execute(self, *args, **kwargs):
+        """execute a query using either the req or the cnx"""
+        if self.req is None:
+            execute = self.cnx.execute
+        else:
+            execute = self.req.execute
+        return execute(*args, **kwargs)
+
     def __repr__(self):
         return '<Transaction %s by %s on %s>' % (
@@ -63,8 +73,8 @@
         """return the user entity which has done the transaction,
         none if not found.
         """
-        return self.req.execute('Any X WHERE X eid %(x)s',
-                                {'x': self.user_eid}).get_entity(0, 0)
+        return self._execute('Any X WHERE X eid %(x)s',
+                             {'x': self.user_eid}).get_entity(0, 0)

     def actions_list(self, public=True):
         """return an ordered list of actions performed during that transaction
@@ -72,7 +82,11 @@
         if public is true, return only 'public' actions, i.e. not ones triggered
         under the cover by hooks.
""" - return self.req.cnx.transaction_actions(self.uuid, public) + if self.req is not None: + cnx = self.req.cnx + else: + cnx = self.cnx + return cnx.transaction_actions(self.uuid, public) class AbstractAction(object): diff -r 84738d495ffd -r 793377697c81 utils.py --- a/utils.py Wed Sep 24 17:35:59 2014 +0200 +++ b/utils.py Wed Sep 24 18:04:30 2014 +0200 @@ -26,6 +26,7 @@ import datetime import random import re +import json from operator import itemgetter from inspect import getargspec @@ -39,6 +40,7 @@ from logilab.mtconverter import xml_escape from logilab.common.deprecation import deprecated +from logilab.common.date import ustrftime _MARKER = object() @@ -167,8 +169,6 @@ id(self), self._item, self._size) def __len__(self): return self._size - def __nonzero__(self): - return self._size def __iter__(self): return repeat(self._item, self._size) def __getitem__(self, index): @@ -465,77 +465,66 @@ self.head.getvalue(), self.body.getvalue()) -try: - # may not be there if cubicweb-web not installed - if sys.version_info < (2, 6): - import simplejson as json - else: - import json -except ImportError: - json_dumps = JSString = None -else: - from logilab.common.date import ustrftime - - class CubicWebJsonEncoder(json.JSONEncoder): - """define a json encoder to be able to encode yams std types""" +class CubicWebJsonEncoder(json.JSONEncoder): + """define a json encoder to be able to encode yams std types""" - def default(self, obj): - if hasattr(obj, '__json_encode__'): - return obj.__json_encode__() - if isinstance(obj, datetime.datetime): - return ustrftime(obj, '%Y/%m/%d %H:%M:%S') - elif isinstance(obj, datetime.date): - return ustrftime(obj, '%Y/%m/%d') - elif isinstance(obj, datetime.time): - return obj.strftime('%H:%M:%S') - elif isinstance(obj, datetime.timedelta): - return (obj.days * 24 * 60 * 60) + obj.seconds - elif isinstance(obj, decimal.Decimal): - return float(obj) - try: - return json.JSONEncoder.default(self, obj) - except TypeError: - # we never ever want to fail because of an unknown type, - # just return None in those cases. - return None + def default(self, obj): + if hasattr(obj, '__json_encode__'): + return obj.__json_encode__() + if isinstance(obj, datetime.datetime): + return ustrftime(obj, '%Y/%m/%d %H:%M:%S') + elif isinstance(obj, datetime.date): + return ustrftime(obj, '%Y/%m/%d') + elif isinstance(obj, datetime.time): + return obj.strftime('%H:%M:%S') + elif isinstance(obj, datetime.timedelta): + return (obj.days * 24 * 60 * 60) + obj.seconds + elif isinstance(obj, decimal.Decimal): + return float(obj) + try: + return json.JSONEncoder.default(self, obj) + except TypeError: + # we never ever want to fail because of an unknown type, + # just return None in those cases. 
+            return None

-    def json_dumps(value, **kwargs):
-        return json.dumps(value, cls=CubicWebJsonEncoder, **kwargs)
+def json_dumps(value, **kwargs):
+    return json.dumps(value, cls=CubicWebJsonEncoder, **kwargs)

-    class JSString(str):
-        """use this string sub class in values given to :func:`js_dumps` to
-        insert raw javascript chain in some JSON string
-        """
+class JSString(str):
+    """use this string subclass in values given to :func:`js_dumps` to
+    insert raw javascript in some JSON string
+    """

-    def _dict2js(d, predictable=False):
-        res = [key + ': ' + js_dumps(val, predictable)
-               for key, val in d.iteritems()]
-        return '{%s}' % ', '.join(res)
+def _dict2js(d, predictable=False):
+    res = [key + ': ' + js_dumps(val, predictable)
+           for key, val in d.iteritems()]
+    return '{%s}' % ', '.join(res)

-    def _list2js(l, predictable=False):
-        return '[%s]' % ', '.join([js_dumps(val, predictable) for val in l])
+def _list2js(l, predictable=False):
+    return '[%s]' % ', '.join([js_dumps(val, predictable) for val in l])

-    def js_dumps(something, predictable=False):
-        """similar as :func:`json_dumps`, except values which are instances of
-        :class:`JSString` are expected to be valid javascript and will be output
-        as is
+def js_dumps(something, predictable=False):
+    """similar to :func:`json_dumps`, except values which are instances of
+    :class:`JSString` are expected to be valid javascript and will be output
+    as is

-        >>> js_dumps({'hop': JSString('$.hop'), 'bar': None}, predictable=True)
-        '{bar: null, hop: $.hop}'
-        >>> js_dumps({'hop': '$.hop'})
-        '{hop: "$.hop"}'
-        >>> js_dumps({'hip': {'hop': JSString('momo')}})
-        '{hip: {hop: momo}}'
-        """
-        if isinstance(something, dict):
-            return _dict2js(something, predictable)
-        if isinstance(something, list):
-            return _list2js(something, predictable)
-        if isinstance(something, JSString):
-            return something
-        return json_dumps(something)
+    >>> js_dumps({'hop': JSString('$.hop'), 'bar': None}, predictable=True)
+    '{bar: null, hop: $.hop}'
+    >>> js_dumps({'hop': '$.hop'})
+    '{hop: "$.hop"}'
+    >>> js_dumps({'hip': {'hop': JSString('momo')}})
+    '{hip: {hop: momo}}'
+    """
+    if isinstance(something, dict):
+        return _dict2js(something, predictable)
+    if isinstance(something, list):
+        return _list2js(something, predictable)
+    if isinstance(something, JSString):
+        return something
+    return json_dumps(something)

 PERCENT_IN_URLQUOTE_RE = re.compile(r'%(?=[0-9a-fA-F]{2})')
 def js_href(javascript_code):
diff -r 84738d495ffd -r 793377697c81 view.py
--- a/view.py Wed Sep 24 17:35:59 2014 +0200
+++ b/view.py Wed Sep 24 18:04:30 2014 +0200
@@ -558,14 +558,6 @@
     __registry__ = 'adapters'

-class auto_unwrap_bw_compat(type):
-    def __new__(mcs, name, bases, classdict):
-        cls = type.__new__(mcs, name, bases, classdict)
-        if not classdict.get('__needs_bw_compat__'):
-            unwrap_adapter_compat(cls)
-        return cls
-
 class EntityAdapter(Adapter):
     """base class for entity adapters (eg adapt an entity to an interface)"""
     def __init__(self, _cw, **kwargs):
diff -r 84738d495ffd -r 793377697c81 web/_exceptions.py
--- a/web/_exceptions.py Wed Sep 24 17:35:59 2014 +0200
+++ b/web/_exceptions.py Wed Sep 24 18:04:30 2014 +0200
@@ -64,7 +64,6 @@
     def __repr__(self):
         return '%s(%r, %r)' % (self.__class__.__name__, self.status, self.content)
-        self.url = url

 # Publish related error
diff -r 84738d495ffd -r 793377697c81 web/action.py
--- a/web/action.py Wed Sep 24 17:35:59 2014 +0200
+++ b/web/action.py Wed Sep 24 18:04:30 2014 +0200
@@ -25,43 +25,11 @@
 The most important method from a developer point of view is the
 :meth:`Action.url` method, which returns a URL on which the navigation
-should directed to perform the action. There are two common ways of
-writing that method:
-
-* do nothing special and simply return a URL to the current rset with
-  a special view (with `self._cw.build_url(...)` for instance)
-
-* define an inner function `callback_func(req, *args)` which will do
-  the work and call it through `self._cw.user_callback(callback_func,
-  args, msg)`: this method will return a URL which calls the inner
-  function, and displays the message in the web interface when the
-  callback has completed (and report any exception occuring in the
-  callback too)
-
-Many examples of the first approach are available in :mod:`cubicweb.web.views.actions`.
-
-Here is an example of the second approach:
-
-.. sourcecode:: python
+should be directed to perform the action. The common way of
+writing that method is to simply return a URL to the current rset with a
+special view (with `self._cw.build_url(...)` for instance)

-    from cubicweb.web import action
-    class SomeAction(action.Action):
-        __regid__ = 'mycube_some_action'
-        title = _(some action)
-        __select__ = action.Action.__select__ & is_instance('TargetEntity')
-
-        def url(self):
-            if self.cw_row is None:
-                eids = [row[0] for row in self.cw_rset]
-            else:
-                eids = (self.cw_rset[self.cw_row][self.cw_col or 0],)
-            def do_action(req, eids):
-                for eid in eids:
-                    entity = req.entity_from_eid(eid, 'TargetEntity')
-                    entity.perform_action()
-                msg = self._cw._('some_action performed')
-                return self._cw.user_callback(do_action, (eids,), msg)
-
+Many examples are available in :mod:`cubicweb.web.views.actions`.
 """

 __docformat__ = "restructuredtext en"
diff -r 84738d495ffd -r 793377697c81 web/application.py
--- a/web/application.py Wed Sep 24 17:35:59 2014 +0200
+++ b/web/application.py Wed Sep 24 18:04:30 2014 +0200
@@ -34,9 +34,9 @@
 from cubicweb import (
     ValidationError, Unauthorized, Forbidden,
     AuthenticationError, NoSelectableObject,
-    BadConnectionId, CW_EVENT_MANAGER)
-from cubicweb.dbapi import DBAPISession, anonymous_session
-from cubicweb.web import LOGGER, component
+    CW_EVENT_MANAGER)
+from cubicweb.repoapi import anonymous_cnx
+from cubicweb.web import LOGGER, component, cors
 from cubicweb.web import (
     StatusResponse, DirectResponse, Redirect, NotFound, LogOut,
     RemoteCallFailed, InvalidSession, RequestError, PublishException)
@@ -50,20 +50,23 @@

 @contextmanager
 def anonymized_request(req):
-    orig_session = req.session
-    req.set_session(anonymous_session(req.vreg))
+    orig_cnx = req.cnx
+    anon_clt_cnx = anonymous_cnx(orig_cnx._session.repo)
+    req.set_cnx(anon_clt_cnx)
     try:
-        yield req
+        with anon_clt_cnx:
+            yield req
     finally:
-        req.set_session(orig_session)
+        req.set_cnx(orig_cnx)

 class AbstractSessionManager(component.Component):
     """manage session data associated to a session identifier"""
     __regid__ = 'sessionmanager'

-    def __init__(self, vreg):
+    def __init__(self, repo):
+        vreg = repo.vreg
         self.session_time = vreg.config['http-session-time'] or None
-        self.authmanager = vreg['components'].select('authmanager', vreg=vreg)
+        self.authmanager = vreg['components'].select('authmanager', repo=repo)
         interval = (self.session_time or 0) / 2.
if vreg.config.anonymous_user()[0] is not None: self.cleanup_anon_session_time = vreg.config['cleanup-anonymous-session-time'] or 5 * 60 @@ -84,22 +87,15 @@ closed, total = 0, 0 for session in self.current_sessions(): total += 1 - try: - last_usage_time = session.cnx.check() - except AttributeError: - last_usage_time = session.mtime - except BadConnectionId: + last_usage_time = session.mtime + no_use_time = (time() - last_usage_time) + if session.anonymous_session: + if no_use_time >= self.cleanup_anon_session_time: + self.close_session(session) + closed += 1 + elif session_time is not None and no_use_time >= session_time: self.close_session(session) closed += 1 - else: - no_use_time = (time() - last_usage_time) - if session.anonymous_session: - if no_use_time >= self.cleanup_anon_session_time: - self.close_session(session) - closed += 1 - elif session_time is not None and no_use_time >= session_time: - self.close_session(session) - closed += 1 return closed, total - closed def current_sessions(self): @@ -111,8 +107,7 @@ raise NotImplementedError() def open_session(self, req): - """open and return a new session for the given request. The session is - also bound to the request. + """open and return a new session for the given request. raise :exc:`cubicweb.AuthenticationError` if authentication failed (no authentication info found or wrong user/password) @@ -130,8 +125,8 @@ """authenticate user associated to a request and check session validity""" __regid__ = 'authmanager' - def __init__(self, vreg): - self.vreg = vreg + def __init__(self, repo): + self.vreg = repo.vreg def validate_session(self, req, session): """check session validity, reconnecting it to the repository if the @@ -159,9 +154,10 @@ """a session handler using a cookie to store the session identifier""" def __init__(self, appli): + self.repo = appli.repo self.vreg = appli.vreg self.session_manager = self.vreg['components'].select('sessionmanager', - vreg=self.vreg) + repo=self.repo) global SESSION_MANAGER SESSION_MANAGER = self.session_manager if self.vreg.config.mode != 'test': @@ -173,7 +169,7 @@ def reset_session_manager(self): data = self.session_manager.dump_data() self.session_manager = self.vreg['components'].select('sessionmanager', - vreg=self.vreg) + repo=self.repo) self.session_manager.restore_data(data) global SESSION_MANAGER SESSION_MANAGER = self.session_manager @@ -196,66 +192,40 @@ return '__%s_https_session' % self.vreg.config.appid return '__%s_session' % self.vreg.config.appid - def set_session(self, req): - """associate a session to the request + def get_session(self, req): + """Return a session object corresponding to credentials held by the req Session id is searched from : - # form variable - cookie - if no session id is found, open a new session for the connected user - or request authentification as needed + If no session id is found, try opening a new session with credentials + found in the request. - :raise Redirect: if authentication has occurred and succeed + Raises AuthenticationError if no session can be found or created. 
""" cookie = req.get_cookie() sessioncookie = self.session_cookie(req) try: sessionid = str(cookie[sessioncookie].value) - except KeyError: # no session cookie + session = self.get_session_by_id(req, sessionid) + except (KeyError, InvalidSession): # no valid session cookie session = self.open_session(req) - else: - try: - session = self.get_session(req, sessionid) - except InvalidSession: - # try to open a new session, so we get an anonymous session if - # allowed - session = self.open_session(req) - else: - if not session.cnx: - # session exists but is not bound to a connection. We should - # try to authenticate - loginsucceed = False - try: - if self.open_session(req, allow_no_cnx=False): - loginsucceed = True - except Redirect: - # may be raised in open_session (by postlogin mechanism) - # on successful connection - loginsucceed = True - raise - except AuthenticationError: - # authentication failed, continue to use this session - req.set_session(session) - finally: - if loginsucceed: - # session should be replaced by new session created - # in open_session - self.session_manager.close_session(session) + return session - def get_session(self, req, sessionid): + def get_session_by_id(self, req, sessionid): session = self.session_manager.get_session(req, sessionid) session.mtime = time() return session - def open_session(self, req, allow_no_cnx=True): - session = self.session_manager.open_session(req, allow_no_cnx=allow_no_cnx) + def open_session(self, req): + session = self.session_manager.open_session(req) sessioncookie = self.session_cookie(req) secure = req.https and req.base_url().startswith('https://') req.set_cookie(sessioncookie, session.sessionid, maxage=None, secure=secure) if not session.anonymous_session: - self.session_manager.postlogin(req) + self.session_manager.postlogin(req, session) return session def logout(self, req, goto_url): @@ -277,21 +247,20 @@ The http server will call its main entry point ``application.handle_request``. .. automethod:: cubicweb.web.application.CubicWebPublisher.main_handle_request + + You have to provide both a repository and web-server config at + initialization. In all in one instance both config will be the same. """ - def __init__(self, config, - session_handler_fact=CookieSessionHandler, - vreg=None): + def __init__(self, repo, config, session_handler_fact=CookieSessionHandler): self.info('starting web instance from %s', config.apphome) - if vreg is None: - vreg = cwvreg.CWRegistryStore(config) - self.vreg = vreg - # connect to the repository and get instance's schema - self.repo = config.repository(vreg) - if not vreg.initialized: + self.repo = repo + self.vreg = repo.vreg + # get instance's schema + if not self.vreg.initialized: config.init_cubes(self.repo.get_cubes()) - vreg.init_properties(self.repo.properties()) - vreg.set_schema(self.repo.get_schema()) + self.vreg.init_properties(self.repo.properties()) + self.vreg.set_schema(self.repo.get_schema()) # set the correct publish method if config['query-log-file']: from threading import Lock @@ -310,12 +279,12 @@ self.url_resolver = self.vreg['components'].select('urlpublisher', vreg=self.vreg) - def connect(self, req): - """return a connection for a logged user object according to existing - sessions (i.e. a new connection may be created or an already existing - one may be reused + def get_session(self, req): + """Return a session object corresponding to credentials held by the req + + May raise AuthenticationError. 
""" - self.session_handler.set_session(req) + return self.session_handler.get_session(req) # publish methods ######################################################### @@ -323,6 +292,21 @@ """wrapper around _publish to log all queries executed for a given accessed path """ + def wrap_set_cnx(func): + def wrap_execute(cnx): + orig_execute = cnx.execute + def execute(rql, kwargs=None, build_descr=True): + tstart, cstart = time(), clock() + rset = orig_execute(rql, kwargs, build_descr=build_descr) + cnx.executed_queries.append((rql, kwargs, time() - tstart, clock() - cstart)) + return rset + return execute + def set_cnx(cnx): + func(cnx) + cnx.execute = wrap_execute(cnx) + cnx.executed_queries = [] + return set_cnx + req.set_cnx = wrap_set_cnx(req.set_cnx) try: return self.main_handle_request(req, path) finally: @@ -362,7 +346,24 @@ req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False) content = '' try: - self.connect(req) + try: + session = self.get_session(req) + from cubicweb import repoapi + cnx = repoapi.ClientConnection(session) + req.set_cnx(cnx) + except AuthenticationError: + # Keep the dummy session set at initialisation. + # such session with work to an some extend but raise an + # AuthenticationError on any database access. + import contextlib + @contextlib.contextmanager + def dummy(): + yield + cnx = dummy() + # XXX We want to clean up this approach in the future. But + # several cubes like registration or forgotten password rely on + # this principle. + # DENY https acces for anonymous_user if (req.https and req.session.anonymous_session @@ -373,7 +374,8 @@ # handler try: ### Try to generate the actual request content - content = self.core_handle(req, path) + with cnx: + content = self.core_handle(req, path) # Handle user log-out except LogOut as ex: # When authentification is handled by cookie the code that @@ -421,6 +423,7 @@ content = self.need_login_content(req) return content + def core_handle(self, req, path): """method called by the main publisher to process @@ -446,6 +449,8 @@ try: ### standard processing of the request try: + # apply CORS sanity checks + cors.process_request(req, self.vreg.config) ctrlid, rset = self.url_resolver.process(req, path) try: controller = self.vreg['controllers'].select(ctrlid, req, @@ -454,6 +459,10 @@ raise Unauthorized(req._('not authorized')) req.update_search_state() result = controller.publish(rset=rset) + except cors.CORSPreflight: + # Return directly an empty 200 + req.status_out = 200 + result = '' except StatusResponse as ex: warn('[3.16] StatusResponse is deprecated use req.status_out', DeprecationWarning, stacklevel=2) @@ -479,7 +488,7 @@ except Unauthorized as ex: req.data['errmsg'] = req._('You\'re not authorized to access this page. ' 'If you think you should, please contact the site administrator.') - req.status_out = httplib.UNAUTHORIZED + req.status_out = httplib.FORBIDDEN result = self.error_handler(req, ex, tb=False) except Forbidden as ex: req.data['errmsg'] = req._('This action is forbidden. 
' @@ -506,9 +515,6 @@ req.cnx.rollback() except Exception: pass # ignore rollback error at this point - # request may be referenced by "onetime callback", so clear its entity - # cache to avoid memory usage - req.drop_entity_cache() self.add_undo_link_to_msg(req) self.debug('query %s executed in %s sec', req.relative_path(), clock() - tstart) return result diff -r 84738d495ffd -r 793377697c81 web/captcha.py --- a/web/captcha.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/captcha.py Wed Sep 24 18:04:30 2014 +0200 @@ -24,7 +24,7 @@ from random import randint, choice from cStringIO import StringIO -import Image, ImageFont, ImageDraw, ImageFilter +from PIL import Image, ImageFont, ImageDraw, ImageFilter from time import time diff -r 84738d495ffd -r 793377697c81 web/component.py --- a/web/component.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/component.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -508,25 +508,27 @@ class EditRelationMixIn(ReloadableMixIn): - def box_item(self, entity, etarget, rql, label): + + def box_item(self, entity, etarget, fname, label): """builds HTML link to edit relation between `entity` and `etarget`""" - args = {role(self)[0] : entity.eid, target(self)[0] : etarget.eid} - url = self._cw.user_rql_callback((rql, args)) + args = {role(self) : entity.eid, target(self): etarget.eid} # for each target, provide a link to edit the relation - return u'[%s] %s' % ( - xml_escape(url), label, etarget.view('incontext')) + jscall = js.cw.utils.callAjaxFuncThenReload(fname, + self.rtype, + args['subject'], + args['object']) + return u'[%s] %s' % ( + xml_escape(unicode(jscall)), label, etarget.view('incontext')) def related_boxitems(self, entity): - rql = 'DELETE S %s O WHERE S eid %%(s)s, O eid %%(o)s' % self.rtype - return [self.box_item(entity, etarget, rql, u'-') + return [self.box_item(entity, etarget, 'delete_relation', u'-') for etarget in self.related_entities(entity)] def related_entities(self, entity): return entity.related(self.rtype, role(self), entities=True) def unrelated_boxitems(self, entity): - rql = 'SET S %s O WHERE S eid %%(s)s, O eid %%(o)s' % self.rtype - return [self.box_item(entity, etarget, rql, u'+') + return [self.box_item(entity, etarget, 'add_relation', u'+') for etarget in self.unrelated_entities(entity)] def unrelated_entities(self, entity): diff -r 84738d495ffd -r 793377697c81 web/cors.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/cors.py Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# copyright 2014 Logilab, PARIS + +"""A set of utility functions to handle CORS requests + +Unless specified, all references in this file are related to: + http://www.w3.org/TR/cors + +The provided implementation roughly follows: + http://www.html5rocks.com/static/images/cors_server_flowchart.png + +See also: + https://developer.mozilla.org/en-US/docs/HTTP/Access_control_CORS + +""" + +import urlparse + +from cubicweb.web import LOGGER +info = LOGGER.info + +class CORSFailed(Exception): + """Raised when cross origin resource sharing checks failed""" + + +class CORSPreflight(Exception): + """Raised when cross origin resource sharing checks detects the + request as a valid preflight request""" + + +def process_request(req, config): + """ + Process a request to apply CORS specification 
algorithms + + Check whether the CORS specification is respected and set corresponding + headers to ensure response complies with the specification. + + In case of non-compliance, no CORS-related header is set. + """ + base_url = urlparse.urlsplit(req.base_url()) + expected_host = '://'.join((base_url.scheme, base_url.netloc)) + if not req.get_header('Origin') or req.get_header('Origin') == expected_host: + # not a CORS request, nothing to do + return + try: + # handle cross origin resource sharing (CORS) + if req.http_method() == 'OPTIONS': + if req.get_header('Access-Control-Request-Method'): + # preflight CORS request + process_preflight(req, config) + else: # Simple CORS or actual request + process_simple(req, config) + except CORSFailed, exc: + info('Cross origin resource sharing failed: %s' % exc) + except CORSPreflight: + info('Cross origin resource sharing: valid Preflight request %s') + raise + +def process_preflight(req, config): + """cross origin resource sharing (preflight) + Cf http://www.w3.org/TR/cors/#resource-preflight-requests + """ + origin = check_origin(req, config) + allowed_methods = set(config['access-control-allow-methods']) + allowed_headers = set(config['access-control-allow-headers']) + try: + method = req.get_header('Access-Control-Request-Method') + except ValueError: + raise CORSFailed('Access-Control-Request-Method is incorrect') + if method not in allowed_methods: + raise CORSFailed('Method is not allowed') + try: + req.get_header('Access-Control-Request-Headers', ()) + except ValueError: + raise CORSFailed('Access-Control-Request-Headers is incorrect') + req.set_header('Access-Control-Allow-Methods', allowed_methods, raw=False) + req.set_header('Access-Control-Allow-Headers', allowed_headers, raw=False) + + process_common(req, config, origin) + raise CORSPreflight() + +def process_simple(req, config): + """Handle the Simple Cross-Origin Request case + """ + origin = check_origin(req, config) + exposed_headers = config['access-control-expose-headers'] + if exposed_headers: + req.set_header('Access-Control-Expose-Headers', exposed_headers, raw=False) + process_common(req, config, origin) + +def process_common(req, config, origin): + req.set_header('Access-Control-Allow-Origin', origin) + # in CW, we always support credential/authentication + req.set_header('Access-Control-Allow-Credentials', 'true') + +def check_origin(req, config): + origin = req.get_header('Origin').lower() + allowed_origins = config.get('access-control-allow-origin') + if not allowed_origins: + raise CORSFailed('access-control-allow-origin is not configured') + if '*' not in allowed_origins and origin not in allowed_origins: + raise CORSFailed('Origin is not allowed') + # bit of sanity check; see "6.3 Security" + myhost = urlparse.urlsplit(req.base_url()).netloc + host = req.get_header('Host') + if host != myhost: + info('cross origin resource sharing detected possible ' + 'DNS rebinding attack Host header != host of base_url: ' + '%s != %s' % (host, myhost)) + raise CORSFailed('Host header and hostname do not match') + # include "Vary: Origin" header (see 6.4) + req.set_header('Vary', 'Origin') + return origin + diff -r 84738d495ffd -r 793377697c81 web/data/cubicweb.ajax.js --- a/web/data/cubicweb.ajax.js Wed Sep 24 17:35:59 2014 +0200 +++ b/web/data/cubicweb.ajax.js Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -/* copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +/* copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
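cors.py is easiest to follow end to end: a preflight OPTIONS request either fails silently (`CORSFailed` is only logged, so no CORS header leaks out) or escapes as `CORSPreflight`, which `core_handle` above turns into an empty 200. A minimal driving sketch; `FakeReq` only mimics the four request methods this module actually calls and is not part of the patch::

    from cubicweb.web import cors

    class FakeReq(object):
        def __init__(self, headers):
            self.headers_in, self.headers_out = headers, {}
        def base_url(self):
            return 'https://myapp.example.org/'
        def http_method(self):
            return 'OPTIONS'
        def get_header(self, name, default=None):
            return self.headers_in.get(name, default)
        def set_header(self, name, value, raw=True):
            self.headers_out[name] = value

    config = {'access-control-allow-origin': ['https://other.example.org'],
              'access-control-allow-methods': ['GET', 'POST'],
              'access-control-allow-headers': ['X-Requested-With'],
              'access-control-expose-headers': []}
    req = FakeReq({'Origin': 'https://other.example.org',
                   'Host': 'myapp.example.org',
                   'Access-Control-Request-Method': 'POST'})
    try:
        cors.process_request(req, config)
    except cors.CORSPreflight:
        # valid preflight: the caller answers 200 with the headers set here
        assert (req.headers_out['Access-Control-Allow-Origin']
                == 'https://other.example.org')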
* contact http://www.logilab.fr/ -- mailto:contact@logilab.fr * * This file is part of CubicWeb. @@ -312,7 +312,7 @@ $.extend(form, { 'fname': fname, 'pageid': pageid, - 'arg': $.map(cw.utils.sliceList(arguments, 2), jQuery.toJSON) + 'arg': $.map(cw.utils.sliceList(arguments, 2), JSON.stringify) }); return form; } @@ -338,7 +338,6 @@ } else if (this.size() < 1) { cw.log('loadxhtml called without an element'); } - var callback = null; var node = this.get(0); // only consider the first element if (cursor) { setProgressCursor(); @@ -362,9 +361,6 @@ jQuery(node).append(domnode); } _postAjaxLoad(node); - while (jQuery.isFunction(callback)) { - callback = callback.apply(this, [domnode]); - } }); d.addErrback(remoteCallFailed); if (cursor) { @@ -521,16 +517,20 @@ }); } -function userCallback(cbname) { +userCallback = cw.utils.deprecatedFunction( + '[3.19] use a plain ajaxfunc instead of user callbacks', + function userCallback(cbname) { setProgressCursor(); var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs('user_callback', null, cbname)); d.addCallback(resetCursor); d.addErrback(resetCursor); d.addErrback(remoteCallFailed); return d; -} +}); -function userCallbackThenUpdateUI(cbname, compid, rql, msg, registry, nodeid) { +userCallbackThenUpdateUI = cw.utils.deprecatedFunction( + '[3.19] use a plain ajaxfunc instead of user callbacks', + function userCallbackThenUpdateUI(cbname, compid, rql, msg, registry, nodeid) { var d = userCallback(cbname); d.addCallback(function() { $('#' + nodeid).loadxhtml(AJAX_BASE_URL, ajaxFuncArgs('render', {'rql': rql}, @@ -539,9 +539,11 @@ updateMessage(msg); } }); -} +}); -function userCallbackThenReloadPage(cbname, msg) { +userCallbackThenReloadPage = cw.utils.deprecatedFunction( + '[3.19] use a plain ajaxfunc instead of user callbacks', + function userCallbackThenReloadPage(cbname, msg) { var d = userCallback(cbname); d.addCallback(function() { window.location.reload(); @@ -549,7 +551,7 @@ updateMessage(msg); } }); -} +}); /** * .. function:: unregisterUserCallback(cbname) @@ -557,14 +559,17 @@ * unregisters the python function registered on the server's side * while the page was generated. */ -function unregisterUserCallback(cbname) { +unregisterUserCallback = cw.utils.deprecatedFunction( + '[3.19] use a plain ajaxfunc instead of user callbacks', + function unregisterUserCallback(cbname) { setProgressCursor(); var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs('unregister_user_callback', null, cbname)); d.addCallback(resetCursor); d.addErrback(resetCursor); d.addErrback(remoteCallFailed); -} +}); + //============= XXX move those functions? 
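All the `userCallback*` entry points above now carry a `[3.19]` deprecation warning pointing at plain ajax functions. Server side, the replacement is an `ajaxfunc`-decorated callable; the decorator and its `output_type` argument are the regular ajaxcontroller API (also imported in the test views later in this patch), while the function below is only an illustration::

    from cubicweb.web.views.ajaxcontroller import ajaxfunc

    @ajaxfunc(output_type='json')
    def entity_title(self, eid):
        # self._cw is the web request; the return value is json-encoded
        return self._cw.entity_from_eid(int(eid)).dc_title()

Client side, `asyncRemoteExec('entity_title', eid)` then replaces the register-then-`userCallback(cbname)` round trip.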
====================================// function openHash() { @@ -749,7 +754,7 @@ var props = { fname: fname, pageid: pageid, - arg: $.map(cw.utils.sliceList(arguments, 1), jQuery.toJSON) + arg: $.map(cw.utils.sliceList(arguments, 1), JSON.stringify) }; var result = jQuery.ajax({ url: AJAX_BASE_URL, @@ -769,7 +774,7 @@ var props = { fname: fname, pageid: pageid, - arg: $.map(cw.utils.sliceList(arguments, 1), jQuery.toJSON) + arg: $.map(cw.utils.sliceList(arguments, 1), JSON.stringify) }; // XXX we should inline the content of loadRemote here var deferred = loadRemote(AJAX_BASE_URL, props, 'POST'); diff -r 84738d495ffd -r 793377697c81 web/data/cubicweb.edition.js --- a/web/data/cubicweb.edition.js Wed Sep 24 17:35:59 2014 +0200 +++ b/web/data/cubicweb.edition.js Wed Sep 24 18:04:30 2014 +0200 @@ -26,7 +26,7 @@ var args = { fname: 'prop_widget', pageid: pageid, - arg: $.map([key, varname, tabindex], jQuery.toJSON) + arg: $.map([key, varname, tabindex], JSON.stringify) }; cw.jqNode('div:value:' + varname).loadxhtml(AJAX_BASE_URL, args, 'post'); } @@ -328,7 +328,7 @@ _postAjaxLoad(dom); }); d.addErrback(function(xxx) { - log('xxx =', xxx); + cw.log('xxx =', xxx); }); } diff -r 84738d495ffd -r 793377697c81 web/data/cubicweb.js --- a/web/data/cubicweb.js Wed Sep 24 17:35:59 2014 +0200 +++ b/web/data/cubicweb.js Wed Sep 24 18:04:30 2014 +0200 @@ -100,7 +100,8 @@ return $node.text(); } return cw.evalJSON(sortvalue); - } + }, + }); @@ -384,11 +385,19 @@ */ strFuncCall: function(fname /* ...*/) { return (fname + '(' + - $.map(cw.utils.sliceList(arguments, 1), jQuery.toJSON).join(',') + $.map(cw.utils.sliceList(arguments, 1), JSON.stringify).join(',') + ')' ); + }, + + callAjaxFuncThenReload: function callAjaxFuncThenReload (/*...*/) { + var d = asyncRemoteExec.apply(null, arguments); + d.addCallback(function(msg) { + window.location.reload(); + if (msg) + updateMessage(msg); + }); } - }); /** DOM factories ************************************************************/ diff -r 84738d495ffd -r 793377697c81 web/data/cubicweb.old.css --- a/web/data/cubicweb.old.css Wed Sep 24 17:35:59 2014 +0200 +++ b/web/data/cubicweb.old.css Wed Sep 24 18:04:30 2014 +0200 @@ -265,32 +265,48 @@ /* header */ table#header { - background: %(headerBg)s; + background-image: linear-gradient(white, #e2e2e2); width: 100%; + border-bottom: 1px solid #bbb; + text-shadow: 1px 1px 0 #f5f5f5; } table#header td { vertical-align: middle; } -table#header a { - color: #000; +table#header, table#header a { + color: #444; } + table#header td#headtext { white-space: nowrap; + padding: 0 10px; + width: 10%; +} + +#logo{ + width: 150px; + height: 42px; + background-image: url(logo-cubicweb.svg); + background-repeat: no-repeat; + background-position: center center; + background-size: contain; + float: left; } table#header td#header-right { - padding-top: 1em; white-space: nowrap; + width: 10%; } table#header td#header-center{ - width: 100%; + border-bottom-left-radius: 10px; + border-top-left-radius: 10px; + padding-left: 1em; } span#appliName { font-weight: bold; - color: #000; white-space: nowrap; } @@ -642,6 +658,8 @@ div#userActionsBox { width: 14em; text-align: right; + display: inline-block; + padding-right: 10px; } div#userActionsBox a.popupMenu { diff -r 84738d495ffd -r 793377697c81 web/data/favicon.ico Binary file web/data/favicon.ico has changed diff -r 84738d495ffd -r 793377697c81 web/data/jquery.json.js --- a/web/data/jquery.json.js Wed Sep 24 17:35:59 2014 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,35 +0,0 @@ 
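With `callAjaxFuncThenReload` in place, `EditRelationMixIn.box_item` (component.py above) ships each relation edit as an ajax call named 'add_relation' or 'delete_relation' with `(rtype, subject eid, object eid)` arguments. A plausible server-side counterpart, reconstructed from the RQL string the removed code built inline; the shipped implementation may differ::

    from cubicweb.web.views.ajaxcontroller import ajaxfunc

    @ajaxfunc(output_type='json')
    def add_relation(self, rtype, subjeid, objeid):
        # same RQL the removed unrelated_boxitems code generated
        self._cw.execute('SET S %s O WHERE S eid %%(s)s, O eid %%(o)s' % rtype,
                         {'s': int(subjeid), 'o': int(objeid)})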
-(function($){function toIntegersAtLease(n) -{return n<10?'0'+n:n;} -Date.prototype.toJSON=function(date) -{return date.getUTCFullYear()+'-'+ -toIntegersAtLease(date.getUTCMonth()+1)+'-'+ -toIntegersAtLease(date.getUTCDate());};var escapeable=/["\\\x00-\x1f\x7f-\x9f]/g;var meta={'\b':'\\b','\t':'\\t','\n':'\\n','\f':'\\f','\r':'\\r','"':'\\"','\\':'\\\\'} -$.quoteString=function(string) -{if(escapeable.test(string)) -{return'"'+string.replace(escapeable,function(a) -{var c=meta[a];if(typeof c==='string'){return c;} -c=a.charCodeAt();return'\\u00'+Math.floor(c/16).toString(16)+(c%16).toString(16);})+'"'} -return'"'+string+'"';} -$.toJSON=function(o) -{var type=typeof(o);if(type=="undefined") -return"undefined";else if(type=="number"||type=="boolean") -return o+"";else if(o===null) -return"null";if(type=="string") -{return $.quoteString(o);} -if(type=="object"&&typeof o.toJSON=="function") -return o.toJSON();if(type!="function"&&typeof(o.length)=="number") -{var ret=[];for(var i=0;i + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r 84738d495ffd -r 793377697c81 web/data/logo-cubicweb-icon.svg --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/data/logo-cubicweb-icon.svg Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,100 @@ + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + diff -r 84738d495ffd -r 793377697c81 web/data/logo-cubicweb-text.svg --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/data/logo-cubicweb-text.svg Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,110 @@ + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + diff -r 84738d495ffd -r 793377697c81 web/data/logo-cubicweb.svg --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/data/logo-cubicweb.svg Wed Sep 24 18:04:30 2014 +0200 @@ -0,0 +1,157 @@ + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r 84738d495ffd -r 793377697c81 web/data/uiprops.py --- a/web/data/uiprops.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/data/uiprops.py Wed Sep 24 18:04:30 2014 +0200 @@ -10,7 +10,6 @@ # Javascripts files to include systematically in HTML headers JAVASCRIPTS = [data('jquery.js'), data('jquery-migrate.js'), - data('jquery.json.js'), data('cubicweb.js'), data('cubicweb.compat.js'), data('cubicweb.python.js'), diff -r 84738d495ffd -r 793377697c81 web/facet.py --- a/web/facet.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/facet.py Wed Sep 24 18:04:30 2014 +0200 @@ -34,6 +34,9 @@ .. autoclass:: cubicweb.web.facet.RangeFacet .. autoclass:: cubicweb.web.facet.DateRangeFacet .. autoclass:: cubicweb.web.facet.BitFieldFacet +.. autoclass:: cubicweb.web.facet.AbstractRangeRQLPathFacet +.. autoclass:: cubicweb.web.facet.RangeRQLPathFacet +.. autoclass:: cubicweb.web.facet.DateRangeRQLPathFacet Classes for facets implementor ------------------------------ @@ -1300,7 +1303,6 @@ self.target_attr_type, operator) - class DateRangeFacet(RangeFacet): """This class works similarly as the :class:`RangeFacet` but for attribute of date type. @@ -1324,6 +1326,110 @@ return '"%s"' % ustrftime(date_value, '%Y/%m/%d') +class AbstractRangeRQLPathFacet(RQLPathFacet): + """ + The :class:`AbstractRangeRQLPathFacet` is the base class for + RQLPathFacet-type facets allowing the use of RangeWidgets-like + widgets (such as (:class:`FacetRangeWidget`, + class:`DateFacetRangeWidget`) on the parent :class:`RQLPathFacet` + target attribute. 
+ """ + __abstract__ = True + + def vocabulary(self): + """return vocabulary for this facet, eg a list of (label, + value)""" + select = self.select + select.save_state() + try: + filtered_variable = self.filtered_variable + cleanup_select(select, filtered_variable) + varmap, restrvar = self.add_path_to_select() + if self.label_variable: + attrvar = varmap[self.label_variable] + else: + attrvar = restrvar + # start RangeRQLPathFacet + minf = nodes.Function('MIN') + minf.append(nodes.VariableRef(restrvar)) + select.add_selected(minf) + maxf = nodes.Function('MAX') + maxf.append(nodes.VariableRef(restrvar)) + select.add_selected(maxf) + # add is restriction if necessary + if filtered_variable.stinfo['typerel'] is None: + etypes = frozenset(sol[filtered_variable.name] for sol in select.solutions) + select.add_type_restriction(filtered_variable, etypes) + # end RangeRQLPathFacet + try: + rset = self.rqlexec(select.as_string(), self.cw_rset.args) + except Exception: + self.exception('error while getting vocabulary for %s, rql: %s', + self, select.as_string()) + return () + finally: + select.recover() + # don't call rset_vocabulary on empty result set, it may be an empty + # *list* (see rqlexec implementation) + if rset: + minv, maxv = rset[0] + return [(unicode(minv), minv), (unicode(maxv), maxv)] + return [] + + + def possible_values(self): + """return a list of possible values (as string since it's used to + compare to a form value in javascript) for this facet + """ + return [strval for strval, val in self.vocabulary()] + + def add_rql_restrictions(self): + infvalue = self.infvalue() + supvalue = self.supvalue() + if infvalue is None or supvalue is None: # nothing sent + return + varmap, restrvar = self.add_path_to_select( + skiplabel=True, skipattrfilter=True) + restrel = None + for part in self.path: + if isinstance(part, basestring): + part = part.split() + subject, rtype, object = part + if object == self.filter_variable: + restrel = rtype + assert restrel + # when a value is equal to one of the limit, don't add the restriction, + # else we filter out NULL values implicitly + if infvalue != self.infvalue(min=True): + + self._add_restriction(infvalue, '>=', restrvar, restrel) + if supvalue != self.supvalue(max=True): + self._add_restriction(supvalue, '<=', restrvar, restrel) + + def _add_restriction(self, value, operator, restrvar, restrel): + self.select.add_constant_restriction(restrvar, + restrel, + self.formatvalue(value), + self.target_attr_type, operator) + + +class RangeRQLPathFacet(AbstractRangeRQLPathFacet, RQLPathFacet): + """ + The :class:`RangeRQLPathFacet` uses the :class:`FacetRangeWidget` + on the :class:`AbstractRangeRQLPathFacet` target attribute + """ + pass + + +class DateRangeRQLPathFacet(AbstractRangeRQLPathFacet, DateRangeFacet): + """ + The :class:`DateRangeRQLPathFacet` uses the + :class:`DateFacetRangeWidget` on the + :class:`AbstractRangeRQLPathFacet` target attribute + """ + pass + + class HasRelationFacet(AbstractFacet): """This class simply filter according to the presence of a relation (whatever the entity at the other end). It display a simple checkbox that diff -r 84738d495ffd -r 793377697c81 web/formfields.py --- a/web/formfields.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/formfields.py Wed Sep 24 18:04:30 2014 +0200 @@ -1033,6 +1033,10 @@ # while it has no value, hence generating a false error. 
return list(self.fields) + @property + def needs_multipart(self): + return any(f.needs_multipart for f in self.fields) + class RelationField(Field): """Use this field to edit a relation of an entity. diff -r 84738d495ffd -r 793377697c81 web/formwidgets.py --- a/web/formwidgets.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/formwidgets.py Wed Sep 24 18:04:30 2014 +0200 @@ -34,6 +34,7 @@ .. autoclass:: cubicweb.web.formwidgets.HiddenInput .. autoclass:: cubicweb.web.formwidgets.TextInput +.. autoclass:: cubicweb.web.formwidgets.EmailInput .. autoclass:: cubicweb.web.formwidgets.PasswordSingleInput .. autoclass:: cubicweb.web.formwidgets.FileInput .. autoclass:: cubicweb.web.formwidgets.ButtonInput @@ -314,6 +315,11 @@ type = 'text' +class EmailInput(Input): + """Simple , will return a unicode string.""" + type = 'email' + + class PasswordSingleInput(Input): """Simple , will return a utf-8 encoded string. diff -r 84738d495ffd -r 793377697c81 web/htmlwidgets.py --- a/web/htmlwidgets.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/htmlwidgets.py Wed Sep 24 18:04:30 2014 +0200 @@ -153,8 +153,6 @@ else: return u'
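The `needs_multipart` property added to compound fields makes a form switch to multipart encoding as soon as any subfield requires it, and `EmailInput` simply emits an HTML5 `<input type="email">`, delegating format validation to the browser. Wiring the widget to an attribute goes through the usual uicfg tagging; the `EmailAddress.address` target below is an example of how a cube would opt in::

    from cubicweb.web import formwidgets as fw
    from cubicweb.web.views import uicfg

    uicfg.autoform_field_kwargs.tag_attribute(
        ('EmailAddress', 'address'), {'widget': fw.EmailInput()})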
<li class="%s">' % self.liclass - return self.label - def _render(self): self.w(u'%s%s
  • ' % (self._start_li(), self.label)) diff -r 84738d495ffd -r 793377697c81 web/http_headers.py --- a/web/http_headers.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/http_headers.py Wed Sep 24 18:04:30 2014 +0200 @@ -8,6 +8,7 @@ from calendar import timegm import base64 import re +import urlparse def dashCapitalize(s): ''' Capitalize a string, making sure to treat - as a word seperator ''' @@ -27,11 +28,11 @@ def casemappingify(d): global header_case_mapping - newd = dict([(key.lower(),key) for key in d]) + newd = dict([(key.lower(), key) for key in d]) header_case_mapping.update(newd) def lowerify(d): - return dict([(key.lower(),value) for key,value in d.items()]) + return dict([(key.lower(), value) for key, value in d.items()]) class HeaderHandler(object): @@ -73,13 +74,13 @@ try: for p in parser: - # print "Parsing %s: %s(%s)" % (name, repr(p), repr(h)) + #print "==> Parsing %s: %s(%s)" % (name, repr(p), repr(header)) header = p(header) # if isinstance(h, types.GeneratorType): - # h=list(h) + # h = list(h) except ValueError as v: # print v - header=None + header = None return header @@ -187,7 +188,7 @@ # Two digit year, yucko. day, month, year = parts[1].split('-') time = parts[2] - year=int(year) + year = int(year) if year < 69: year = year + 2000 elif year < 100: @@ -242,8 +243,8 @@ Takes a raw header value (list of strings), and Returns a generator of strings and Token class instances. """ - tokens=http_tokens - ctls=http_ctls + tokens = http_tokens + ctls = http_ctls string = ",".join(header) list = [] @@ -265,7 +266,7 @@ elif x == '"': quoted = False yield qstring+string[start:cur] - qstring=None + qstring = None start = cur+1 elif x in tokens: if start != cur: @@ -339,7 +340,7 @@ hurt anything, in any case. """ - l=[] + l = [] for x in seq: if not isinstance(x, Token): l.append(x) @@ -353,16 +354,16 @@ def parseKeyValue(val): if len(val) == 1: - return val[0],None + return val[0], None elif len(val) == 3 and val[1] == Token('='): - return val[0],val[2] + return val[0], val[2] raise ValueError, "Expected key or key=value, but got %s." 
% (val,) def parseArgs(field): - args=split(field, Token(';')) + args = split(field, Token(';')) val = args.next() args = [parseKeyValue(arg) for arg in args] - return val,args + return val, args def listParser(fun): """Return a function which applies 'fun' to every element in the @@ -377,8 +378,44 @@ def last(seq): """Return seq[-1]""" + return seq[-1] - return seq[-1] +def unique(seq): + '''if seq is not a string, check it's a sequence of one element and return it''' + if isinstance(seq, basestring): + return seq + if len(seq) != 1: + raise ValueError('single value required, not %s' % seq) + return seq[0] + +def parseHTTPMethod(method): + """Ensure a HTTP method is valid according the rfc2616, but extension-method ones""" + method = method.strip() + if method not in ("OPTIONS", "GET", "HEAD", "POST", "PUT", "DELETE", + "TRACE", "CONNECT"): + raise ValueError('Unsupported HTTP method %s' % method) + return method + +def parseAllowOrigin(origin): + """Ensure origin is a valid URL-base stuff, or null""" + if origin == 'null': + return origin + p = urlparse.urlparse(origin) + if p.params or p.query or p.username or p.path not in ('', '/'): + raise ValueError('Incorrect Accept-Control-Allow-Origin value %s' % origin) + if p.scheme not in ('http', 'https'): + raise ValueError('Unsupported Accept-Control-Allow-Origin URL scheme %s' % origin) + if not p.netloc: + raise ValueError('Accept-Control-Allow-Origin: host name cannot be unset (%s)' % origin) + return origin + +def parseAllowCreds(cred): + """Can be "true" """ + if cred: + cred = cred.lower() + if cred and cred != 'true': + raise ValueError('Accept-Control-Allow-Credentials can only be "true" (%s)' % cred) + return cred ##### Generation utilities def quoteString(s): @@ -401,11 +438,11 @@ def generateKeyValues(kvs): l = [] # print kvs - for k,v in kvs: + for k, v in kvs: if v is None: l.append('%s' % k) else: - l.append('%s=%s' % (k,v)) + l.append('%s=%s' % (k, v)) return ";".join(l) @@ -453,7 +490,7 @@ ##### Specific header parsers. def parseAccept(field): - type,args = parseArgs(field) + type, args = parseArgs(field) if len(type) != 3 or type[1] != Token('/'): raise ValueError, "MIME Type "+str(type)+" invalid." @@ -465,30 +502,30 @@ num = 0 for arg in args: if arg[0] == 'q': - mimeparams=tuple(args[0:num]) - params=args[num:] + mimeparams = tuple(args[0:num]) + params = args[num:] break num = num + 1 else: - mimeparams=tuple(args) - params=[] + mimeparams = tuple(args) + params = [] # Default values for parameters: qval = 1.0 # Parse accept parameters: for param in params: - if param[0] =='q': + if param[0] == 'q': qval = float(param[1]) else: # Warn? ignored parameter. pass - ret = MimeType(type[0],type[2],mimeparams),qval + ret = MimeType(type[0], type[2], mimeparams), qval return ret def parseAcceptQvalue(field): - type,args=parseArgs(field) + type, args = parseArgs(field) type = checkSingleToken(type) @@ -496,7 +533,7 @@ for arg in args: if arg[0] == 'q': qvalue = float(arg[1]) - return type,qvalue + return type, qvalue def addDefaultCharset(charsets): if charsets.get('*') is None and charsets.get('iso-8859-1') is None: @@ -516,7 +553,7 @@ # Content-Type: multipart/form-data; boundary=CaSeFuLsTuFf # So, we need to explicitly .lower() the type/subtype and arg keys. - type,args = parseArgs(header) + type, args = parseArgs(header) if len(type) != 3 or type[1] != Token('/'): raise ValueError, "MIME Type "+str(type)+" invalid." @@ -535,14 +572,14 @@ """Parse a content-range header into (kind, start, end, realLength). 
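These four helpers back the new Access-Control-* entries registered in the parser/generator tables further down. Their contracts, sketched as plain calls (import path as in this file)::

    from cubicweb.web.http_headers import (unique, parseHTTPMethod,
                                           parseAllowOrigin, parseAllowCreds)

    parseHTTPMethod(' GET ')                 # -> 'GET'
    parseAllowOrigin('https://example.org')  # -> 'https://example.org'
    parseAllowOrigin('null')                 # 'null' is explicitly allowed
    parseAllowCreds('TRUE')                  # -> 'true', the only legal value
    unique(['https://example.org'])          # -> 'https://example.org'
    # each of these raises ValueError:
    #   parseHTTPMethod('BREW')
    #   parseAllowOrigin('https://example.org/path')
    #   unique(['a', 'b'])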
realLength might be None if real length is not known ('*'). - start and end might be None if start,end unspecified (for response code 416) + start and end might be None if start, end unspecified (for response code 416) """ kind, other = header.strip().split() if kind.lower() != "bytes": raise ValueError("a range of type %r is not supported") startend, realLength = other.split("/") if startend.strip() == '*': - start,end=None,None + start, end = None, None else: start, end = map(int, startend.split("-")) if realLength == "*": @@ -552,9 +589,9 @@ return (kind, start, end, realLength) def parseExpect(field): - type,args=parseArgs(field) + type, args = parseArgs(field) - type=parseKeyValue(type) + type = parseKeyValue(type) return (type[0], (lambda *args:args)(type[1], *args)) def parseExpires(header): @@ -586,16 +623,16 @@ if len(range) < 3 or range[1] != Token('='): raise ValueError("Invalid range header format: %s" %(range,)) - type=range[0] + type = range[0] if type != 'bytes': raise ValueError("Unknown range unit: %s." % (type,)) - rangeset=split(range[2:], Token(',')) + rangeset = split(range[2:], Token(',')) ranges = [] for byterangespec in rangeset: if len(byterangespec) != 1: raise ValueError("Invalid range header format: %s" % (range,)) - start,end=byterangespec[0].split('-') + start, end = byterangespec[0].split('-') if not start and not end: raise ValueError("Invalid range header format: %s" % (range,)) @@ -612,8 +649,8 @@ if start and end and start > end: raise ValueError("Invalid range header, start > end: %s" % (range,)) - ranges.append((start,end)) - return type,ranges + ranges.append((start, end)) + return type, ranges def parseRetryAfter(header): try: @@ -676,9 +713,9 @@ #### Header generators def generateAccept(accept): - mimeType,q = accept + mimeType, q = accept - out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype) + out ="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype) if mimeType.params: out+=';'+generateKeyValues(mimeType.params.iteritems()) @@ -724,7 +761,7 @@ # quoted list of values v = quoteString(generateList( [header_case_mapping.get(name) or dashCapitalize(name) for name in v])) - return '%s=%s' % (k,v) + return '%s=%s' % (k, v) def generateContentRange(tup): """tup is (type, start, end, len) @@ -767,7 +804,7 @@ return '' return s - type,ranges=range + type, ranges = range if type != 'bytes': raise ValueError("Unknown range unit: "+type+".") @@ -781,9 +818,9 @@ return str(int(when - time.time())) def generateContentType(mimeType): - out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype) + out = "%s/%s" % (mimeType.mediaType, mimeType.mediaSubtype) if mimeType.params: - out+=';'+generateKeyValues(mimeType.params.iteritems()) + out += ';' + generateKeyValues(mimeType.params.iteritems()) return out def generateIfRange(dateOrETag): @@ -804,7 +841,7 @@ try: l = [] - for k,v in dict(challenge).iteritems(): + for k, v in dict(challenge).iteritems(): l.append("%s=%s" % (k, quoteString(v))) _generated.append("%s %s" % (scheme, ", ".join(l))) @@ -849,7 +886,7 @@ return "Etag(%r, weak=%r)" % (self.tag, self.weak) def parse(tokens): - tokens=tuple(tokens) + tokens = tuple(tokens) if len(tokens) == 1 and not isinstance(tokens[0], Token): return ETag(tokens[0]) @@ -859,7 +896,7 @@ raise ValueError("Invalid ETag.") - parse=staticmethod(parse) + parse = staticmethod(parse) def generate(self): if self.weak: @@ -868,14 +905,14 @@ return quoteString(self.tag) def parseStarOrETag(tokens): - tokens=tuple(tokens) + tokens = tuple(tokens) if tokens == ('*',): return '*' else: 
return ETag.parse(tokens) def generateStarOrETag(etag): - if etag=='*': + if etag == '*': return etag else: return etag.generate() @@ -885,20 +922,20 @@ # __slots__ = ['name', 'value', 'path', 'domain', 'ports', 'expires', 'discard', 'secure', 'comment', 'commenturl', 'version'] def __init__(self, name, value, path=None, domain=None, ports=None, expires=None, discard=False, secure=False, comment=None, commenturl=None, version=0): - self.name=name - self.value=value - self.path=path - self.domain=domain - self.ports=ports - self.expires=expires - self.discard=discard - self.secure=secure - self.comment=comment - self.commenturl=commenturl - self.version=version + self.name = name + self.value = value + self.path = path + self.domain = domain + self.ports = ports + self.expires = expires + self.discard = discard + self.secure = secure + self.comment = comment + self.commenturl = commenturl + self.version = version def __repr__(self): - s="Cookie(%r=%r" % (self.name, self.value) + s = "Cookie(%r=%r" % (self.name, self.value) if self.path is not None: s+=", path=%r" % (self.path,) if self.domain is not None: s+=", domain=%r" % (self.domain,) if self.ports is not None: s+=", ports=%r" % (self.ports,) @@ -941,7 +978,7 @@ header = ';'.join(headers) if header[0:8].lower() == "$version": # RFC2965 cookie - h=tokenize([header], foldCase=False) + h = tokenize([header], foldCase=False) r_cookies = split(h, Token(',')) for r_cookie in r_cookies: last_cookie = None @@ -954,20 +991,20 @@ (name,), = nameval value = None - name=name.lower() + name = name.lower() if name == '$version': continue if name[0] == '$': if last_cookie is not None: if name == '$path': - last_cookie.path=value + last_cookie.path = value elif name == '$domain': - last_cookie.domain=value + last_cookie.domain = value elif name == '$port': if value is None: last_cookie.ports = () else: - last_cookie.ports=tuple([int(s) for s in value.split(',')]) + last_cookie.ports = tuple([int(s) for s in value.split(',')]) else: last_cookie = Cookie(name, value, version=1) cookies.append(last_cookie) @@ -978,9 +1015,9 @@ # however. r_cookies = header.split(';') for r_cookie in r_cookies: - name,value = r_cookie.split('=', 1) - name=name.strip(' \t') - value=value.strip(' \t') + name, value = r_cookie.split('=', 1) + name = name.strip(' \t') + value = value.strip(' \t') cookies.append(Cookie(name, value)) @@ -1048,7 +1085,7 @@ if cookie_validname_re.match(cookie.name) is None: continue - value=cookie.value + value = cookie.value if cookie_validvalue_re.match(cookie.value) is None: value = quoteString(value) @@ -1078,13 +1115,13 @@ for part in parts: namevalue = part.split('=',1) if len(namevalue) == 1: - name=namevalue[0] - value=None + name = namevalue[0] + value = None else: - name,value=namevalue - value=value.strip(' \t') + name, value = namevalue + value = value.strip(' \t') - name=name.strip(' \t') + name = name.strip(' \t') l.append((name, value)) @@ -1115,7 +1152,7 @@ cookie = Cookie(name, value) hadMaxAge = False - for name,value in tup[1:]: + for name, value in tup[1:]: name = name.lower() if value is None: @@ -1229,15 +1266,15 @@ # def getMimeQuality(mimeType, accepts): -# type,args = parseArgs(mimeType) -# type=type.split(Token('/')) +# type, args = parseArgs(mimeType) +# type = type.split(Token('/')) # if len(type) != 2: # raise ValueError, "MIME Type "+s+" invalid." 
# for accept in accepts: -# accept,acceptQual=accept -# acceptType=accept[0:1] -# acceptArgs=accept[2] +# accept, acceptQual = accept +# acceptType = accept[0:1] +# acceptArgs = accept[2] # if ((acceptType == type or acceptType == (type[0],'*') or acceptType==('*','*')) and # (args == acceptArgs or len(acceptArgs) == 0)): @@ -1299,7 +1336,7 @@ def getRawHeaders(self, name, default=None): """Returns a list of headers matching the given name as the raw string given.""" - name=name.lower() + name = name.lower() raw_header = self._raw_headers.get(name, default) if raw_header is not _RecalcNeeded: return raw_header @@ -1314,7 +1351,7 @@ If the header doesn't exist, return default (or None if not specified) """ - name=name.lower() + name = name.lower() parsed = self._headers.get(name, default) if parsed is not _RecalcNeeded: return parsed @@ -1325,7 +1362,7 @@ Value should be a list of strings, each being one header of the given name. """ - name=name.lower() + name = name.lower() self._raw_headers[name] = value self._headers[name] = _RecalcNeeded @@ -1334,7 +1371,7 @@ Value should be a list of objects whose exact form depends on the header in question. """ - name=name.lower() + name = name.lower() self._raw_headers[name] = _RecalcNeeded self._headers[name] = value @@ -1344,7 +1381,7 @@ If it exists, add it as a separate header to output; do not replace anything. """ - name=name.lower() + name = name.lower() raw_header = self._raw_headers.get(name) if raw_header is None: # No header yet @@ -1362,7 +1399,7 @@ If it exists, add it as a separate header to output; do not replace anything. """ - name=name.lower() + name = name.lower() header = self._headers.get(name) if header is None: # No header yet @@ -1375,7 +1412,7 @@ def removeHeader(self, name): """Removes the header named.""" - name=name.lower() + name = name.lower() if name in self._raw_headers: del self._raw_headers[name] del self._headers[name] @@ -1389,10 +1426,10 @@ return header_case_mapping.get(name) or dashCapitalize(name) def getAllRawHeaders(self): - """Return an iterator of key,value pairs of all headers + """Return an iterator of key, value pairs of all headers contained in this object, as strings. 
The keys are capitalized in canonical capitalization.""" - for k,v in self._raw_headers.iteritems(): + for k, v in self._raw_headers.iteritems(): if v is _RecalcNeeded: v = self._toRaw(k) yield self.canonicalNameCaps(k), v @@ -1418,24 +1455,24 @@ parser_general_headers = { - 'Cache-Control':(tokenize, listParser(parseCacheControl), dict), - 'Connection':(tokenize,filterTokens), - 'Date':(last,parseDateTime), + 'Cache-Control': (tokenize, listParser(parseCacheControl), dict), + 'Connection': (tokenize, filterTokens), + 'Date': (last, parseDateTime), # 'Pragma':tokenize # 'Trailer':tokenize - 'Transfer-Encoding':(tokenize,filterTokens), + 'Transfer-Encoding': (tokenize, filterTokens), # 'Upgrade':tokenize -# 'Via':tokenize,stripComment +# 'Via':tokenize, stripComment # 'Warning':tokenize } generator_general_headers = { - 'Cache-Control':(iteritems, listGenerator(generateCacheControl), singleHeader), - 'Connection':(generateList,singleHeader), - 'Date':(generateDateTime,singleHeader), + 'Cache-Control': (iteritems, listGenerator(generateCacheControl), singleHeader), + 'Connection': (generateList, singleHeader), + 'Date': (generateDateTime, singleHeader), # 'Pragma': # 'Trailer': - 'Transfer-Encoding':(generateList,singleHeader), + 'Transfer-Encoding': (generateList, singleHeader), # 'Upgrade': # 'Via': # 'Warning': @@ -1444,102 +1481,114 @@ parser_request_headers = { 'Accept': (tokenize, listParser(parseAccept), dict), 'Accept-Charset': (tokenize, listParser(parseAcceptQvalue), dict, addDefaultCharset), - 'Accept-Encoding':(tokenize, listParser(parseAcceptQvalue), dict, addDefaultEncoding), - 'Accept-Language':(tokenize, listParser(parseAcceptQvalue), dict), + 'Accept-Encoding': (tokenize, listParser(parseAcceptQvalue), dict, addDefaultEncoding), + 'Accept-Language': (tokenize, listParser(parseAcceptQvalue), dict), + 'Access-Control-Allow-Origin': (last, parseAllowOrigin,), + 'Access-Control-Allow-Credentials': (last, parseAllowCreds,), + 'Access-Control-Allow-Methods': (tokenize, listParser(parseHTTPMethod), list), + 'Access-Control-Request-Method': (parseHTTPMethod, ), + 'Access-Control-Request-Headers': (filterTokens, ), + 'Access-Control-Expose-Headers': (filterTokens, ), 'Authorization': (last, parseAuthorization), - 'Cookie':(parseCookie,), - 'Expect':(tokenize, listParser(parseExpect), dict), - 'From':(last,), - 'Host':(last,), - 'If-Match':(tokenize, listParser(parseStarOrETag), list), - 'If-Modified-Since':(last, parseIfModifiedSince), - 'If-None-Match':(tokenize, listParser(parseStarOrETag), list), - 'If-Range':(parseIfRange,), - 'If-Unmodified-Since':(last,parseDateTime), - 'Max-Forwards':(last,int), + 'Cookie': (parseCookie,), + 'Expect': (tokenize, listParser(parseExpect), dict), + 'Origin': (last,), + 'From': (last,), + 'Host': (last,), + 'If-Match': (tokenize, listParser(parseStarOrETag), list), + 'If-Modified-Since': (last, parseIfModifiedSince), + 'If-None-Match': (tokenize, listParser(parseStarOrETag), list), + 'If-Range': (parseIfRange,), + 'If-Unmodified-Since': (last, parseDateTime), + 'Max-Forwards': (last, int), # 'Proxy-Authorization':str, # what is "credentials" - 'Range':(tokenize, parseRange), - 'Referer':(last,str), # TODO: URI object? - 'TE':(tokenize, listParser(parseAcceptQvalue), dict), - 'User-Agent':(last,str), + 'Range': (tokenize, parseRange), + 'Referer': (last, str), # TODO: URI object? 
+ 'TE': (tokenize, listParser(parseAcceptQvalue), dict), + 'User-Agent': (last, str), } generator_request_headers = { - 'Accept': (iteritems,listGenerator(generateAccept),singleHeader), - 'Accept-Charset': (iteritems, listGenerator(generateAcceptQvalue),singleHeader), - 'Accept-Encoding': (iteritems, removeDefaultEncoding, listGenerator(generateAcceptQvalue),singleHeader), - 'Accept-Language': (iteritems, listGenerator(generateAcceptQvalue),singleHeader), + 'Accept': (iteritems, listGenerator(generateAccept), singleHeader), + 'Accept-Charset': (iteritems, listGenerator(generateAcceptQvalue), singleHeader), + 'Accept-Encoding': (iteritems, removeDefaultEncoding, + listGenerator(generateAcceptQvalue), singleHeader), + 'Accept-Language': (iteritems, listGenerator(generateAcceptQvalue), singleHeader), + 'Access-Control-Request-Method': (unique, str, singleHeader, ), + 'Access-Control-Expose-Headers': (listGenerator(str), ), + 'Access-Control-Allow-Headers': (listGenerator(str), ), 'Authorization': (generateAuthorization,), # what is "credentials" - 'Cookie':(generateCookie,singleHeader), - 'Expect':(iteritems, listGenerator(generateExpect), singleHeader), - 'From':(str,singleHeader), - 'Host':(str,singleHeader), - 'If-Match':(listGenerator(generateStarOrETag), singleHeader), - 'If-Modified-Since':(generateDateTime,singleHeader), - 'If-None-Match':(listGenerator(generateStarOrETag), singleHeader), - 'If-Range':(generateIfRange, singleHeader), - 'If-Unmodified-Since':(generateDateTime,singleHeader), - 'Max-Forwards':(str, singleHeader), + 'Cookie': (generateCookie, singleHeader), + 'Expect': (iteritems, listGenerator(generateExpect), singleHeader), + 'From': (unique, str, singleHeader), + 'Host': (unique, str, singleHeader), + 'If-Match': (listGenerator(generateStarOrETag), singleHeader), + 'If-Modified-Since': (generateDateTime, singleHeader), + 'If-None-Match': (listGenerator(generateStarOrETag), singleHeader), + 'If-Range': (generateIfRange, singleHeader), + 'If-Unmodified-Since': (generateDateTime, singleHeader), + 'Max-Forwards': (unique, str, singleHeader), + 'Origin': (unique, str, singleHeader), # 'Proxy-Authorization':str, # what is "credentials" - 'Range':(generateRange,singleHeader), - 'Referer':(str,singleHeader), - 'TE': (iteritems, listGenerator(generateAcceptQvalue),singleHeader), - 'User-Agent':(str,singleHeader), + 'Range': (generateRange, singleHeader), + 'Referer': (unique, str, singleHeader), + 'TE': (iteritems, listGenerator(generateAcceptQvalue), singleHeader), + 'User-Agent': (unique, str, singleHeader), } parser_response_headers = { - 'Accept-Ranges':(tokenize, filterTokens), - 'Age':(last,int), - 'ETag':(tokenize, ETag.parse), - 'Location':(last,), # TODO: URI object? + 'Accept-Ranges': (tokenize, filterTokens), + 'Age': (last, int), + 'ETag': (tokenize, ETag.parse), + 'Location': (last,), # TODO: URI object? 
# 'Proxy-Authenticate' - 'Retry-After':(last, parseRetryAfter), - 'Server':(last,), - 'Set-Cookie':(parseSetCookie,), - 'Set-Cookie2':(tokenize, parseSetCookie2), - 'Vary':(tokenize, filterTokens), + 'Retry-After': (last, parseRetryAfter), + 'Server': (last,), + 'Set-Cookie': (parseSetCookie,), + 'Set-Cookie2': (tokenize, parseSetCookie2), + 'Vary': (tokenize, filterTokens), 'WWW-Authenticate': (lambda h: tokenize(h, foldCase=False), parseWWWAuthenticate,) } generator_response_headers = { - 'Accept-Ranges':(generateList, singleHeader), - 'Age':(str, singleHeader), - 'ETag':(ETag.generate, singleHeader), - 'Location':(str, singleHeader), + 'Accept-Ranges': (generateList, singleHeader), + 'Age': (unique, str, singleHeader), + 'ETag': (ETag.generate, singleHeader), + 'Location': (unique, str, singleHeader), # 'Proxy-Authenticate' - 'Retry-After':(generateRetryAfter, singleHeader), - 'Server':(str, singleHeader), - 'Set-Cookie':(generateSetCookie,), - 'Set-Cookie2':(generateSetCookie2,), - 'Vary':(generateList, singleHeader), - 'WWW-Authenticate':(generateWWWAuthenticate,) + 'Retry-After': (generateRetryAfter, singleHeader), + 'Server': (unique, str, singleHeader), + 'Set-Cookie': (generateSetCookie,), + 'Set-Cookie2': (generateSetCookie2,), + 'Vary': (set, generateList, singleHeader), + 'WWW-Authenticate': (generateWWWAuthenticate,) } parser_entity_headers = { - 'Allow':(lambda str:tokenize(str, foldCase=False), filterTokens), - 'Content-Encoding':(tokenize, filterTokens), - 'Content-Language':(tokenize, filterTokens), - 'Content-Length':(last, int), - 'Content-Location':(last,), # TODO: URI object? - 'Content-MD5':(last, parseContentMD5), - 'Content-Range':(last, parseContentRange), - 'Content-Type':(lambda str:tokenize(str, foldCase=False), parseContentType), - 'Expires':(last, parseExpires), - 'Last-Modified':(last, parseDateTime), + 'Allow': (lambda str:tokenize(str, foldCase=False), filterTokens), + 'Content-Encoding': (tokenize, filterTokens), + 'Content-Language': (tokenize, filterTokens), + 'Content-Length': (last, int), + 'Content-Location': (last,), # TODO: URI object? 
+ 'Content-MD5': (last, parseContentMD5), + 'Content-Range': (last, parseContentRange), + 'Content-Type': (lambda str:tokenize(str, foldCase=False), parseContentType), + 'Expires': (last, parseExpires), + 'Last-Modified': (last, parseDateTime), } generator_entity_headers = { - 'Allow':(generateList, singleHeader), - 'Content-Encoding':(generateList, singleHeader), - 'Content-Language':(generateList, singleHeader), - 'Content-Length':(str, singleHeader), - 'Content-Location':(str, singleHeader), - 'Content-MD5':(base64.encodestring, lambda x: x.strip("\n"), singleHeader), - 'Content-Range':(generateContentRange, singleHeader), - 'Content-Type':(generateContentType, singleHeader), - 'Expires':(generateDateTime, singleHeader), - 'Last-Modified':(generateDateTime, singleHeader), + 'Allow': (generateList, singleHeader), + 'Content-Encoding': (generateList, singleHeader), + 'Content-Language': (generateList, singleHeader), + 'Content-Length': (unique, str, singleHeader), + 'Content-Location': (unique, str, singleHeader), + 'Content-MD5': (base64.encodestring, lambda x: x.strip("\n"), singleHeader), + 'Content-Range': (generateContentRange, singleHeader), + 'Content-Type': (generateContentType, singleHeader), + 'Expires': (generateDateTime, singleHeader), + 'Last-Modified': (generateDateTime, singleHeader), } DefaultHTTPHandler.updateParsers(parser_general_headers) diff -r 84738d495ffd -r 793377697c81 web/request.py --- a/web/request.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/request.py Wed Sep 24 18:04:30 2014 +0200 @@ -30,7 +30,6 @@ from datetime import date, datetime from urlparse import urlsplit import httplib -from itertools import count from warnings import warn from rql.utils import rqlvar_maker @@ -39,6 +38,7 @@ from logilab.common.deprecation import deprecated from logilab.mtconverter import xml_escape +from cubicweb.req import RequestSessionBase from cubicweb.dbapi import DBAPIRequest from cubicweb.uilib import remove_html_tags, js from cubicweb.utils import SizeConstrainedList, HTMLHead, make_uid @@ -81,20 +81,39 @@ return [v for v in value if v != INTERNAL_FIELD_VALUE] +class Counter(object): + """A picklable counter object, usable for e.g. page tab index count""" + __slots__ = ('value',) -class CubicWebRequestBase(DBAPIRequest): + def __init__(self, initialvalue=0): + self.value = initialvalue + + def __call__(self): + value = self.value + self.value += 1 + return value + + def __getstate__(self): + return {'value': self.value} + + def __setstate__(self, state): + self.value = state['value'] + + +class _CubicWebRequestBase(RequestSessionBase): """abstract HTTP request, should be extended according to the HTTP backend Immutable attributes that describe the received query and generic configuration """ ajax_request = False # to be set to True by ajax controllers - def __init__(self, vreg, https=False, form=None, headers={}): + def __init__(self, vreg, https=False, form=None, headers=None): """ :vreg: Vregistry, :https: boolean, s this a https request :form: Forms value + :headers: dict, request header """ - super(CubicWebRequestBase, self).__init__(vreg) + super(_CubicWebRequestBase, self).__init__(vreg) #: (Boolean) Is this an https request. 
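`Counter` exists because the `itertools.count(1).next` bound method it replaces (in `next_tabindex` below) cannot be pickled, and page data may end up serialized into the session. The new object keeps its state across a pickle round trip::

    import pickle
    from cubicweb.web.request import Counter

    c = Counter(1)
    assert (c(), c()) == (1, 2)
    c2 = pickle.loads(pickle.dumps(c))  # a count() generator would fail here
    assert c2() == 3                    # counting resumes where it stopped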
self.https = https #: User interface property (vary with https) (see :ref:`uiprops`) @@ -113,12 +132,16 @@ self.html_headers = HTMLHead(self) #: received headers self._headers_in = Headers() - for k, v in headers.iteritems(): - self._headers_in.addRawHeader(k, v) + if headers is not None: + for k, v in headers.iteritems(): + self._headers_in.addRawHeader(k, v) #: form parameters self.setup_params(form) #: received body self.content = StringIO() + # set up language based on request headers or site default (we don't + # have a user yet, and might not get one) + self.set_user_language(None) #: dictionary that may be used to store request data that has to be #: shared among various components used to publish the request (views, #: controller, application...) @@ -169,7 +192,7 @@ if secure: base_url = self.vreg.config.get('https-url') if base_url is None: - base_url = super(CubicWebRequestBase, self).base_url() + base_url = super(_CubicWebRequestBase, self).base_url() return base_url @property @@ -195,7 +218,7 @@ def next_tabindex(self): nextfunc = self.get_page_data('nexttabfunc') if nextfunc is None: - nextfunc = count(1).next + nextfunc = Counter(1) self.set_page_data('nexttabfunc', nextfunc) return nextfunc() @@ -206,31 +229,6 @@ self.set_page_data('rql_varmaker', varmaker) return varmaker - def set_session(self, session, user=None): - """method called by the session handler when the user is authenticated - or an anonymous connection is open - """ - super(CubicWebRequestBase, self).set_session(session, user) - # set request language - vreg = self.vreg - if self.user: - try: - # 1. user specified language - lang = vreg.typed_value('ui.language', - self.user.properties['ui.language']) - self.set_language(lang) - return - except KeyError: - pass - if vreg.config['language-negociation']: - # 2. http negociated language - for lang in self.header_accept_language(): - if lang in self.translations: - self.set_language(lang) - return - # 3. default language - self.set_default_language(vreg) - # input form parameters management ######################################## # common form parameters which should be protected against html values @@ -327,6 +325,7 @@ def set_message(self, msg): assert isinstance(msg, unicode) + self.reset_message() self._msg = msg def set_message_id(self, msgid): @@ -357,6 +356,7 @@ if hasattr(self, '_msg'): del self._msg if hasattr(self, '_msgid'): + self.session.data.pop(self._msgid, u'') del self._msgid def update_search_state(self): @@ -423,6 +423,7 @@ req.execute(rql, args, key) return self.user_callback(rqlexec, rqlargs, *args, **kwargs) + @deprecated('[3.19] use a traditional ajaxfunc / controller') def user_callback(self, cb, cbargs, *args, **kwargs): """register the given user callback and return a URL which can be inserted in an HTML view. When the URL is accessed, the @@ -725,7 +726,13 @@ if '__message' in kwargs: msg = kwargs.pop('__message') kwargs['_cwmsgid'] = self.set_redirect_message(msg) - return super(CubicWebRequestBase, self).build_url(*args, **kwargs) + if not args: + method = 'view' + if (self.from_controller() == 'view' + and not '_restpath' in kwargs): + method = self.relative_path(includeparams=False) or 'view' + args = (method,) + return super(_CubicWebRequestBase, self).build_url(*args, **kwargs) def url(self, includeparams=True): """return currently accessed url""" @@ -986,6 +993,112 @@ def html_content_type(self): return 'text/html' + def set_user_language(self, user): + vreg = self.vreg + if user is not None: + try: + # 1. 
user-specified language + lang = vreg.typed_value('ui.language', user.properties['ui.language']) + self.set_language(lang) + return + except KeyError: + pass + if vreg.config.get('language-negociation', False): + # 2. http accept-language + for lang in self.header_accept_language(): + if lang in self.translations: + self.set_language(lang) + return + # 3. site's default language + self.set_default_language(vreg) + + +class DBAPICubicWebRequestBase(_CubicWebRequestBase, DBAPIRequest): + + def set_session(self, session): + """method called by the session handler when the user is authenticated + or an anonymous connection is open + """ + super(CubicWebRequestBase, self).set_session(session) + # set request language + self.set_user_language(session.user) + + +def _cnx_func(name): + def proxy(req, *args, **kwargs): + return getattr(req.cnx, name)(*args, **kwargs) + return proxy + + +class ConnectionCubicWebRequestBase(_CubicWebRequestBase): + + def __init__(self, vreg, https=False, form=None, headers={}): + """""" + self.cnx = None + self.session = None + self.vreg = vreg + try: + # no vreg or config which doesn't handle translations + self.translations = vreg.config.translations + except AttributeError: + self.translations = {} + super(ConnectionCubicWebRequestBase, self).__init__(vreg, https=https, + form=form, headers=headers) + from cubicweb.dbapi import DBAPISession, _NeedAuthAccessMock + self.session = DBAPISession(None) + self.cnx = self.user = _NeedAuthAccessMock() + + @property + def transaction_data(self): + return self.cnx.transaction_data + + def set_cnx(self, cnx): + self.cnx = cnx + self.session = cnx._session + self._set_user(cnx.user) + self.set_user_language(cnx.user) + + def execute(self, *args, **kwargs): + rset = self.cnx.execute(*args, **kwargs) + rset.req = self + return rset + + def set_default_language(self, vreg): + # XXX copy from dbapi + try: + lang = vreg.property_value('ui.language') + except Exception: # property may not be registered + lang = 'en' + try: + self.set_language(lang) + except KeyError: + # this occurs usually during test execution + self._ = self.__ = unicode + self.pgettext = lambda x, y: unicode(y) + + entity_metas = _cnx_func('entity_metas') + source_defs = _cnx_func('source_defs') + get_shared_data = _cnx_func('get_shared_data') + set_shared_data = _cnx_func('set_shared_data') + describe = _cnx_func('describe') # deprecated XXX + + # server-side service call ################################################# + + def call_service(self, regid, **kwargs): + return self.cnx.call_service(regid, **kwargs) + + # entities cache management ############################################### + + entity_cache = _cnx_func('entity_cache') + set_entity_cache = _cnx_func('set_entity_cache') + cached_entities = _cnx_func('cached_entities') + drop_entity_cache = _cnx_func('drop_entity_cache') + + + + +CubicWebRequestBase = ConnectionCubicWebRequestBase + ## HTTP-accept parsers / utilies ############################################## def _mimetype_sort_key(accept_info): @@ -1083,4 +1196,4 @@ } from cubicweb import set_log_methods -set_log_methods(CubicWebRequestBase, LOGGER) +set_log_methods(_CubicWebRequestBase, LOGGER) diff -r 84738d495ffd -r 793377697c81 web/test/data/schema.py --- a/web/test/data/schema.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/test/data/schema.py Wed Sep 24 18:04:30 2014 +0200 @@ -43,7 +43,11 @@ class Personne(EntityType): nom = String(fulltextindexed=True, required=True, maxsize=64) prenom = String(fulltextindexed=True, maxsize=64) - sexe = 
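`_cnx_func` is a small delegation factory: each generated method forwards to whatever connection is currently bound to the request, so cache and shared-data handling live in one place. The idiom in isolation, with throwaway classes standing in for the real request and connection::

    def _cnx_func(name):
        def proxy(req, *args, **kwargs):
            return getattr(req.cnx, name)(*args, **kwargs)
        return proxy

    class Cnx(object):
        def get_shared_data(self, key, default=None):
            return {'ui.language': 'en'}.get(key, default)

    class Request(object):
        get_shared_data = _cnx_func('get_shared_data')
        def __init__(self, cnx):
            self.cnx = cnx

    assert Request(Cnx()).get_shared_data('ui.language') == 'en'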
String(maxsize=1, default='M') + sexe = String(maxsize=1, default='M', + __permissions__={ + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', 'users'), + 'update': ('managers', )}) promo = String(vocabulary=('bon','pasbon')) titre = String(fulltextindexed=True, maxsize=128) ass = String(maxsize=128) diff -r 84738d495ffd -r 793377697c81 web/test/data/views.py --- a/web/test/data/views.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/test/data/views.py Wed Sep 24 18:04:30 2014 +0200 @@ -16,32 +16,8 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -from cubicweb.web import Redirect -from cubicweb.web.application import CubicWebPublisher from cubicweb.web.views.ajaxcontroller import ajaxfunc -# proof of concept : monkey patch handle method so that if we are in an -# anonymous session and __fblogin is found is req.form, the user with the -# given login is created if necessary and then a session is opened for that -# user -# NOTE: this require "cookie" authentication mode -def auto_login_handle_request(self, req, path): - if (not req.cnx or req.cnx.anonymous_connection) and req.form.get('__fblogin'): - login = password = req.form.pop('__fblogin') - self.repo.register_user(login, password) - req.form['__login'] = login - req.form['__password'] = password - if req.cnx: - req.cnx.close() - req.cnx = None - try: - self.session_handler.set_session(req) - except Redirect: - pass - assert req.user.login == login - return orig_handle(self, req, path) - - def _recursive_replace_stream_by_content(tree): """ Search for streams (i.e. object that have a 'read' method) in a tree (which branches are lists or tuples), and substitute them by their content, @@ -70,6 +46,3 @@ except Exception, ex: import traceback as tb tb.print_exc(ex) - -orig_handle = CubicWebPublisher.main_handle_request -CubicWebPublisher.main_handle_request = auto_login_handle_request diff -r 84738d495ffd -r 793377697c81 web/test/test_views.py --- a/web/test/test_views.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/test/test_views.py Wed Sep 24 18:04:30 2014 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -16,8 +16,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . """automatic tests""" -from cubicweb.devtools import htmlparser -from cubicweb.devtools.testlib import CubicWebTC, AutoPopulateTest, AutomaticWebTest +from cubicweb.devtools.testlib import AutoPopulateTest, AutomaticWebTest from cubicweb.view import AnyRsetView class AutomaticWebTest(AutomaticWebTest): @@ -28,8 +27,8 @@ ] def to_test_etypes(self): - # We do not really want to test cube views here. So we can drop testing - # some EntityType. The two Blog types below require the sioc cube that + # We do not really want to test cube views here. So we can drop testing + # some EntityType. The two Blog types below require the sioc cube that # we do not want to add as a dependency. 
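From here on, the tests are ported to the 3.19 `RepoAccess` API: no more ambient `self.execute()`/`self.request()` calls, every interaction happens inside an explicit connection or web request context. The minimal shape of the new idiom, with illustrative test and entity names::

    from cubicweb.devtools.testlib import CubicWebTC

    class GroupTC(CubicWebTC):
        def test_group_listing(self):
            with self.admin_access.client_cnx() as cnx:
                cnx.create_entity('CWGroup', name=u'testers')
                cnx.commit()
            with self.admin_access.web_request() as req:
                rset = req.execute('Any G WHERE G is CWGroup, G name "testers"')
                self.assertEqual(len(rset), 1)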
etypes = super(AutomaticWebTest, self).to_test_etypes() etypes -= set(('Blog', 'BlogEntry')) @@ -50,29 +49,34 @@ """regression test: make sure we can ask a copy of a composite entity """ - rset = self.execute('CWUser X WHERE X login "admin"') - self.view('copy', rset) + with self.admin_access.web_request() as req: + rset = req.execute('CWUser X WHERE X login "admin"') + self.view('copy', rset, req=req) def test_sortable_js_added(self): - rset = self.execute('CWUser X') - # sortable.js should not be included by default - self.assertFalse('jquery.tablesorter.js' in self.view('oneline', rset)) - # but should be included by the tableview - rset = self.execute('Any P,F,S LIMIT 1 WHERE P is CWUser, P firstname F, P surname S') - self.assertIn('jquery.tablesorter.js', self.view('table', rset).source) + with self.admin_access.web_request() as req: + rset = req.execute('CWUser X') + # sortable.js should not be included by default + self.assertFalse('jquery.tablesorter.js' in self.view('oneline', rset, req=req)) + # but should be included by the tableview + rset = req.execute('Any P,F,S LIMIT 1 WHERE P is CWUser, P firstname F, P surname S') + self.assertIn('jquery.tablesorter.js', self.view('table', rset, req=req).source) def test_js_added_only_once(self): - self.vreg._loadedmods[__name__] = {} - self.vreg.register(SomeView) - rset = self.execute('CWUser X') - source = self.view('someview', rset).source - self.assertEqual(source.count('spam.js'), 1) + with self.admin_access.web_request() as req: + self.vreg._loadedmods[__name__] = {} + self.vreg.register(SomeView) + rset = req.execute('CWUser X') + source = self.view('someview', rset, req=req).source + self.assertEqual(source.count('spam.js'), 1) def test_unrelateddivs(self): - rset = self.execute('Any X WHERE X is CWUser, X login "admin"') - group = self.request().create_entity('CWGroup', name=u'R&D') - req = self.request(relation='in_group_subject') - self.view('unrelateddivs', rset, req) + with self.admin_access.client_cnx() as cnx: + group = cnx.create_entity('CWGroup', name=u'R&D') + cnx.commit() + with self.admin_access.web_request(relation='in_group_subject') as req: + rset = req.execute('Any X WHERE X is CWUser, X login "admin"') + self.view('unrelateddivs', rset, req=req) if __name__ == '__main__': diff -r 84738d495ffd -r 793377697c81 web/test/unittest_application.py --- a/web/test/unittest_application.py Wed Sep 24 17:35:59 2014 +0200 +++ b/web/test/unittest_application.py Wed Sep 24 18:04:30 2014 +0200 @@ -18,20 +18,20 @@ """unit tests for cubicweb.web.application""" import base64, Cookie -import sys import httplib -from urllib import unquote from logilab.common.testlib import TestCase, unittest_main from logilab.common.decorators import clear_cache, classproperty -from cubicweb import AuthenticationError, Unauthorized +from cubicweb import AuthenticationError from cubicweb import view from cubicweb.devtools.testlib import CubicWebTC, real_error_handling from cubicweb.devtools.fake import FakeRequest from cubicweb.web import LogOut, Redirect, INTERNAL_FIELD_VALUE from cubicweb.web.views.basecontrollers import ViewController from cubicweb.web.application import anonymized_request +from cubicweb.dbapi import DBAPISession, _NeedAuthAccessMock +from cubicweb import repoapi class FakeMapping: """emulates a mapping module""" @@ -165,48 +165,40 @@ return config def test_cnx_user_groups_sync(self): - user = self.user() - self.assertEqual(user.groups, set(('managers',))) - self.execute('SET X in_group G WHERE X eid %s, G name "guests"' % 
diff -r 84738d495ffd -r 793377697c81 web/test/unittest_application.py
--- a/web/test/unittest_application.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/web/test/unittest_application.py	Wed Sep 24 18:04:30 2014 +0200
@@ -18,20 +18,20 @@
 """unit tests for cubicweb.web.application"""
 
 import base64, Cookie
-import sys
 import httplib
-from urllib import unquote
 
 from logilab.common.testlib import TestCase, unittest_main
 from logilab.common.decorators import clear_cache, classproperty
 
-from cubicweb import AuthenticationError, Unauthorized
+from cubicweb import AuthenticationError
 from cubicweb import view
 from cubicweb.devtools.testlib import CubicWebTC, real_error_handling
 from cubicweb.devtools.fake import FakeRequest
 from cubicweb.web import LogOut, Redirect, INTERNAL_FIELD_VALUE
 from cubicweb.web.views.basecontrollers import ViewController
 from cubicweb.web.application import anonymized_request
+from cubicweb.dbapi import DBAPISession, _NeedAuthAccessMock
+from cubicweb import repoapi
 
 class FakeMapping:
     """emulates a mapping module"""
@@ -165,48 +165,40 @@
         return config
 
    def test_cnx_user_groups_sync(self):
-        user = self.user()
-        self.assertEqual(user.groups, set(('managers',)))
-        self.execute('SET X in_group G WHERE X eid %s, G name "guests"' %
-                     user.eid)
-        user = self.user()
-        self.assertEqual(user.groups, set(('managers',)))
-        self.commit()
-        user = self.user()
-        self.assertEqual(user.groups, set(('managers', 'guests')))
-        # cleanup
-        self.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid)
-        self.commit()
-
-    def test_nonregr_publish1(self):
-        req = self.request(u'CWEType X WHERE X final FALSE, X meta FALSE')
-        self.app.handle_request(req, 'view')
-
-    def test_nonregr_publish2(self):
-        req = self.request(u'Any count(N) WHERE N todo_by U, N is Note, U eid %s'
-                           % self.user().eid)
-        self.app.handle_request(req, 'view')
+        with self.admin_access.client_cnx() as cnx:
+            user = cnx.user
+            self.assertEqual(user.groups, set(('managers',)))
+            cnx.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid)
+            user = cnx.user
+            self.assertEqual(user.groups, set(('managers',)))
+            cnx.commit()
+            user = cnx.user
+            self.assertEqual(user.groups, set(('managers', 'guests')))
+            # cleanup
+            cnx.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid)
+            cnx.commit()
 
     def test_publish_validation_error(self):
-        req = self.request()
-        user = self.user()
-        eid = unicode(user.eid)
-        req.form = {
-            'eid': eid,
-            '__type:'+eid: 'CWUser', '_cw_entity_fields:'+eid: 'login-subject',
-            'login-subject:'+eid: '', # ERROR: no login specified
-            # just a sample, missing some necessary information for real life
-            '__errorurl': 'view?vid=edition...'
-        }
-        path, params = self.expect_redirect_handle_request(req, 'edit')
-        forminfo = req.session.data['view?vid=edition...']
-        eidmap = forminfo['eidmap']
-        self.assertEqual(eidmap, {})
-        values = forminfo['values']
-        self.assertEqual(values['login-subject:'+eid], '')
-        self.assertEqual(values['eid'], eid)
-        error = forminfo['error']
-        self.assertEqual(error.entity, user.eid)
-        self.assertEqual(error.errors['login-subject'], 'required field')
+        with self.admin_access.web_request() as req:
+            user = self.user(req)
+            eid = unicode(user.eid)
+            req.form = {
+                'eid': eid,
+                '__type:'+eid: 'CWUser', '_cw_entity_fields:'+eid: 'login-subject',
+                'login-subject:'+eid: '', # ERROR: no login specified
+                # just a sample, missing some necessary information for real life
+                '__errorurl': 'view?vid=edition...'
+            }
+            path, params = self.expect_redirect_handle_request(req, 'edit')
+            forminfo = req.session.data['view?vid=edition...']
+            eidmap = forminfo['eidmap']
+            self.assertEqual(eidmap, {})
+            values = forminfo['values']
+            self.assertEqual(values['login-subject:'+eid], '')
+            self.assertEqual(values['eid'], eid)
+            error = forminfo['error']
+            self.assertEqual(error.entity, user.eid)
+            self.assertEqual(error.errors['login-subject'], 'required field')
 
     def test_validation_error_dont_loose_subentity_data_ctrl(self):
@@ -214,28 +206,28 @@
         error occurs on the web controller
         """
-        req = self.request()
-        # set Y before X to ensure both entities are edited, not only X
-        req.form = {'eid': ['Y', 'X'], '__maineid': 'X',
-                    '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject',
-                    # missing required field
-                    'login-subject:X': u'',
-                    # but email address is set
-                    '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject',
-                    'address-subject:Y': u'bougloup@logilab.fr',
-                    'use_email-object:Y': 'X',
-                    # necessary to get validation error handling
-                    '__errorurl': 'view?vid=edition...',
-                    }
-        path, params = self.expect_redirect_handle_request(req, 'edit')
-        forminfo = req.session.data['view?vid=edition...']
-        self.assertEqual(set(forminfo['eidmap']), set('XY'))
-        self.assertEqual(forminfo['eidmap']['X'], None)
-        self.assertIsInstance(forminfo['eidmap']['Y'], int)
-        self.assertEqual(forminfo['error'].entity, 'X')
-        self.assertEqual(forminfo['error'].errors,
-                         {'login-subject': 'required field'})
-        self.assertEqual(forminfo['values'], req.form)
+        with self.admin_access.web_request() as req:
+            # set Y before X to ensure both entities are edited, not only X
+            req.form = {'eid': ['Y', 'X'], '__maineid': 'X',
+                        '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject',
+                        # missing required field
+                        'login-subject:X': u'',
+                        # but email address is set
+                        '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject',
+                        'address-subject:Y': u'bougloup@logilab.fr',
+                        'use_email-object:Y': 'X',
+                        # necessary to get validation error handling
+                        '__errorurl': 'view?vid=edition...',
+                        }
+            path, params = self.expect_redirect_handle_request(req, 'edit')
+            forminfo = req.session.data['view?vid=edition...']
+            self.assertEqual(set(forminfo['eidmap']), set('XY'))
+            self.assertEqual(forminfo['eidmap']['X'], None)
+            self.assertIsInstance(forminfo['eidmap']['Y'], int)
+            self.assertEqual(forminfo['error'].entity, 'X')
+            self.assertEqual(forminfo['error'].errors,
+                             {'login-subject': 'required field'})
+            self.assertEqual(forminfo['values'], req.form)
 
     def test_validation_error_dont_loose_subentity_data_repo(self):
@@ -243,28 +235,28 @@
         error occurs on the repository
         """
-        req = self.request()
-        # set Y before X to ensure both entities are edited, not only X
-        req.form = {'eid': ['Y', 'X'], '__maineid': 'X',
-                    '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject,upassword-subject',
-                    # already existent user
-                    'login-subject:X': u'admin',
-                    'upassword-subject:X': u'admin', 'upassword-subject-confirm:X': u'admin',
-                    '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject',
-                    'address-subject:Y': u'bougloup@logilab.fr',
-                    'use_email-object:Y': 'X',
-                    # necessary to get validation error handling
-                    '__errorurl': 'view?vid=edition...',
-                    }
-        path, params = self.expect_redirect_handle_request(req, 'edit')
-        forminfo = req.session.data['view?vid=edition...']
-        self.assertEqual(set(forminfo['eidmap']), set('XY'))
-        self.assertIsInstance(forminfo['eidmap']['X'], int)
-        self.assertIsInstance(forminfo['eidmap']['Y'], int)
-        self.assertEqual(forminfo['error'].entity, forminfo['eidmap']['X'])
-        self.assertEqual(forminfo['error'].errors,
-                         {'login-subject': u'the value "admin" is already used, use another one'})
-        self.assertEqual(forminfo['values'], req.form)
+        with self.admin_access.web_request() as req:
+            # set Y before X to ensure both entities are edited, not only X
+            req.form = {'eid': ['Y', 'X'], '__maineid': 'X',
+                        '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject,upassword-subject',
+                        # already existent user
+                        'login-subject:X': u'admin',
+                        'upassword-subject:X': u'admin', 'upassword-subject-confirm:X': u'admin',
+                        '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject',
+                        'address-subject:Y': u'bougloup@logilab.fr',
+                        'use_email-object:Y': 'X',
+                        # necessary to get validation error handling
+                        '__errorurl': 'view?vid=edition...',
+                        }
+            path, params = self.expect_redirect_handle_request(req, 'edit')
+            forminfo = req.session.data['view?vid=edition...']
+            self.assertEqual(set(forminfo['eidmap']), set('XY'))
+            self.assertIsInstance(forminfo['eidmap']['X'], int)
+            self.assertIsInstance(forminfo['eidmap']['Y'], int)
+            self.assertEqual(forminfo['error'].entity, forminfo['eidmap']['X'])
+            self.assertEqual(forminfo['error'].errors,
+                             {'login-subject': u'the value "admin" is already used, use another one'})
+            self.assertEqual(forminfo['values'], req.form)
 
     def test_ajax_view_raise_arbitrary_error(self):
         class ErrorAjaxView(view.View):
@@ -273,17 +265,17 @@
                 raise Exception('whatever')
         with self.temporary_appobjects(ErrorAjaxView):
             with real_error_handling(self.app) as app:
-                req = self.request(vid='test.ajax.error')
-                req.ajax_request = True
-                page = app.handle_request(req, '')
+                with self.admin_access.web_request(vid='test.ajax.error') as req:
+                    req.ajax_request = True
+                    page = app.handle_request(req, '')
         self.assertEqual(httplib.INTERNAL_SERVER_ERROR, req.status_out)
 
     def _test_cleaned(self, kwargs, injected, cleaned):
-        req = self.request(**kwargs)
-        page = self.app.handle_request(req, 'view')
-        self.assertFalse(injected in page, (kwargs, injected))
-        self.assertTrue(cleaned in page, (kwargs, cleaned))
+        with self.admin_access.web_request(**kwargs) as req:
+            page = self.app_handle_request(req, 'view')
+            self.assertNotIn(injected, page)
+            self.assertIn(cleaned, page)
 
     def test_nonregr_script_kiddies(self):
         """test against current script injection"""
@@ -302,39 +294,28 @@
         vreg = self.app.vreg
         # default value
         self.assertEqual(vreg.property_value('ui.language'), 'en')
-        self.execute('INSERT CWProperty X: X value "fr", X pkey "ui.language"')
-        self.assertEqual(vreg.property_value('ui.language'), 'en')
-        self.commit()
-        self.assertEqual(vreg.property_value('ui.language'), 'fr')
-        self.execute('SET X value "de" WHERE X pkey "ui.language"')
-        self.assertEqual(vreg.property_value('ui.language'), 'fr')
-        self.commit()
-        self.assertEqual(vreg.property_value('ui.language'), 'de')
-        self.execute('DELETE CWProperty X WHERE X pkey "ui.language"')
-        self.assertEqual(vreg.property_value('ui.language'), 'de')
-        self.commit()
-        self.assertEqual(vreg.property_value('ui.language'), 'en')
-
-    def test_fb_login_concept(self):
-        """see data/views.py"""
-        self.set_auth_mode('cookie', 'anon')
-        self.login('anon')
-        req = self.request()
-        origcnx = req.cnx
-        req.form['__fblogin'] = u'turlututu'
-        page = self.app.handle_request(req, '')
-        self.assertFalse(req.cnx is origcnx)
-        self.assertEqual(req.user.login, 'turlututu')
-        self.assertTrue('turlututu' in page, page)
-        req.cnx.close() # avoid warning
+        with self.admin_access.client_cnx() as cnx:
+            cnx.execute('INSERT CWProperty X: X value "fr", X pkey "ui.language"')
+            self.assertEqual(vreg.property_value('ui.language'), 'en')
+            cnx.commit()
+            self.assertEqual(vreg.property_value('ui.language'), 'fr')
+            cnx.execute('SET X value "de" WHERE X pkey "ui.language"')
+            self.assertEqual(vreg.property_value('ui.language'), 'fr')
+            cnx.commit()
+            self.assertEqual(vreg.property_value('ui.language'), 'de')
+            cnx.execute('DELETE CWProperty X WHERE X pkey "ui.language"')
+            self.assertEqual(vreg.property_value('ui.language'), 'de')
+            cnx.commit()
+            self.assertEqual(vreg.property_value('ui.language'), 'en')
 
     # authentication tests ####################################################
 
     def test_http_auth_no_anon(self):
         req, origsession = self.init_authentication('http')
         self.assertAuthFailure(req)
-        self.assertRaises(AuthenticationError, self.app_handle_request, req, 'login')
-        self.assertEqual(req.cnx, None)
+        self.app.handle_request(req, 'login')
+        self.assertEqual(401, req.status_out)
+        clear_cache(req, 'get_authorization')
         authstr = base64.encodestring('%s:%s' % (self.admlogin, self.admpassword))
         req.set_request_header('Authorization', 'basic %s' % authstr)
         self.assertAuthSuccess(req, origsession)
@@ -345,12 +326,13 @@
         req, origsession = self.init_authentication('cookie')
         self.assertAuthFailure(req)
         try:
-            form = self.app_handle_request(req, 'login')
+            form = self.app.handle_request(req, 'login')
         except Redirect as redir:
             self.fail('anonymous user should get login form')
-        self.assertTrue('__login' in form)
-        self.assertTrue('__password' in form)
-        self.assertEqual(req.cnx, None)
+        clear_cache(req, 'get_authorization')
+        self.assertIn('__login', form)
+        self.assertIn('__password', form)
+        self.assertFalse(req.cnx) # Mock cnx are False
         req.form['__login'] = self.admlogin
         req.form['__password'] = self.admpassword
         self.assertAuthSuccess(req, origsession)
@@ -358,18 +340,19 @@
         self.assertEqual(len(self.open_sessions), 0)
 
     def test_login_by_email(self):
-        login = self.request().user.login
-        address = login + u'@localhost'
-        self.execute('INSERT EmailAddress X: X address %(address)s, U primary_email X '
-                     'WHERE U login %(login)s', {'address': address, 'login': login})
-        self.commit()
+        with self.admin_access.client_cnx() as cnx:
+            login = cnx.user.login
+            address = login + u'@localhost'
+            cnx.execute('INSERT EmailAddress X: X address %(address)s, U primary_email X '
+                        'WHERE U login %(login)s', {'address': address, 'login': login})
+            cnx.commit()
         # # option allow-email-login not set
         req, origsession = self.init_authentication('cookie')
         # req.form['__login'] = address
         # req.form['__password'] = self.admpassword
         # self.assertAuthFailure(req)
         # option allow-email-login set
-        origsession.login = address
+        #origsession.login = address
         self.set_option('allow-email-login', True)
         req.form['__login'] = address
         req.form['__password'] = self.admpassword
@@ -387,22 +370,27 @@
                              raw=True)
         clear_cache(req, 'get_authorization')
         # reset session as if it was a new incoming request
-        req.session = req.cnx = None
+        req.session = DBAPISession(None)
+        req.user = req.cnx = _NeedAuthAccessMock
+
     def _test_auth_anon(self, req):
-        self.app.connect(req)
-        asession = req.session
+        asession = self.app.get_session(req)
+        # important otherwise _reset_cookie will not use the right session
+        req.set_cnx(repoapi.ClientConnection(asession))
         self.assertEqual(len(self.open_sessions), 1)
         self.assertEqual(asession.login, 'anon')
         self.assertTrue(asession.anonymous_session)
         self._reset_cookie(req)
 
     def _test_anon_auth_fail(self, req):
-        self.assertEqual(len(self.open_sessions), 1)
-        self.app.connect(req)
+        self.assertEqual(1, len(self.open_sessions))
+        session = self.app.get_session(req)
+        # important otherwise _reset_cookie will not use the right session
+        req.set_cnx(repoapi.ClientConnection(session))
         self.assertEqual(req.message, 'authentication failure')
         self.assertEqual(req.session.anonymous_session, True)
-        self.assertEqual(len(self.open_sessions), 1)
+        self.assertEqual(1, len(self.open_sessions))
         self._reset_cookie(req)
 
     def test_http_auth_anon_allowed(self):
@@ -427,25 +415,25 @@
         req.form['__password'] = self.admpassword
         self.assertAuthSuccess(req, origsession)
         self.assertRaises(LogOut, self.app_handle_request, req, 'logout')
-        self.assertEqual(len(self.open_sessions), 0)
+        self.assertEqual(0, len(self.open_sessions))
 
     def test_anonymized_request(self):
-        req = self.request()
-        self.assertEqual(req.session.login, self.admlogin)
-        # admin should see anon + admin
-        self.assertEqual(len(list(req.find_entities('CWUser'))), 2)
-        with anonymized_request(req):
-            self.assertEqual(req.session.login, 'anon')
-            # anon should only see anon user
-            self.assertEqual(len(list(req.find_entities('CWUser'))), 1)
-        self.assertEqual(req.session.login, self.admlogin)
-        self.assertEqual(len(list(req.find_entities('CWUser'))), 2)
+        with self.admin_access.web_request() as req:
+            self.assertEqual(self.admlogin, req.session.user.login)
+            # admin should see anon + admin
+            self.assertEqual(2, len(list(req.find('CWUser'))))
+            with anonymized_request(req):
+                self.assertEqual('anon', req.session.login, 'anon')
+                # anon should only see anon user
+                self.assertEqual(1, len(list(req.find('CWUser'))))
+            self.assertEqual(self.admlogin, req.session.login)
+            self.assertEqual(2, len(list(req.find('CWUser'))))
 
     def test_non_regr_optional_first_var(self):
-        req = self.request()
-        # expect a rset with None in [0][0]
-        req.form['rql'] = 'rql:Any OV1, X WHERE X custom_workflow OV1?'
-        self.app_handle_request(req)
+        with self.admin_access.web_request() as req:
+            # expect a rset with None in [0][0]
+            req.form['rql'] = 'rql:Any OV1, X WHERE X custom_workflow OV1?'
+            self.app_handle_request(req)
 
 if __name__ == '__main__':
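test_anonymized_request above relies on anonymized_request, a context manager from cubicweb.web.application that temporarily rebinds a request to the anonymous session. A condensed sketch of that behaviour, assuming anonymous access is enabled in the test configuration as it is in these tests (class name illustrative):

    from cubicweb.devtools.testlib import CubicWebTC
    from cubicweb.web.application import anonymized_request

    class AnonymizedRequestTC(CubicWebTC):
        def test_temporary_anon(self):
            with self.admin_access.web_request() as req:
                # the admin session sees both the 'admin' and 'anon' users
                self.assertEqual(2, len(list(req.find('CWUser'))))
                with anonymized_request(req):
                    # inside the block the request runs as 'anon', which can
                    # only read its own CWUser entity
                    self.assertEqual('anon', req.session.login)
                    self.assertEqual(1, len(list(req.find('CWUser'))))
                # the admin session is restored when the block exits
                self.assertEqual(2, len(list(req.find('CWUser'))))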
diff -r 84738d495ffd -r 793377697c81 web/test/unittest_breadcrumbs.py
--- a/web/test/unittest_breadcrumbs.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/web/test/unittest_breadcrumbs.py	Wed Sep 24 18:04:30 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -22,21 +22,26 @@
 class BreadCrumbsTC(CubicWebTC):
 
     def test_base(self):
-        req = self.request()
-        f1 = req.create_entity('Folder', name=u'par&ent')
-        f2 = req.create_entity('Folder', name=u'chi&ld')
-        self.execute('SET F2 filed_under F1 WHERE F1 eid %(f1)s, F2 eid %(f2)s',
-                     {'f1' : f1.eid, 'f2' : f2.eid})
-        self.commit()
-        self.assertEqual(f2.view('breadcrumbs'),
-                         'chi&ld' % f2.eid)
-        childrset = f2.as_rset()
-        ibc = self.vreg['ctxcomponents'].select('breadcrumbs', self.request(), rset=childrset)
-        l = []
-        ibc.render(l.append)
-        self.assertEqual(''.join(l),
-                         """ > Folder_plural > par&ent > 
-chi&ld""" % (f1.eid, f2.eid))
+        with self.admin_access.web_request() as req:
+            f1 = req.create_entity('Folder', name=u'par&ent')
+            f2 = req.create_entity('Folder', name=u'chi&ld')
+            req.cnx.execute('SET F2 filed_under F1 WHERE F1 eid %(f1)s, F2 eid %(f2)s',
+                            {'f1' : f1.eid, 'f2' : f2.eid})
+            req.cnx.commit()
+            self.assertEqual(f2.view('breadcrumbs'),
+                             ''
+                             'chi&ld' % f2.eid)
+            childrset = f2.as_rset()
+            ibc = self.vreg['ctxcomponents'].select('breadcrumbs', req, rset=childrset)
+            l = []
+            ibc.render(l.append)
+            self.assertMultiLineEqual(' > '
+                                      'Folder_plural'
+                                      ' > par&ent > \n'
+                                      ''
+                                      'chi&ld' % (f1.eid, f2.eid),
+                                      ''.join(l))
 
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
diff -r 84738d495ffd -r 793377697c81 web/test/unittest_controller.py
--- a/web/test/unittest_controller.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/web/test/unittest_controller.py	Wed Sep 24 18:04:30 2014 +0200
@@ -28,30 +28,32 @@
 class BaseControllerTC(testlib.CubicWebTC):
 
     def test_parse_datetime_ok(self):
-        ctrl = self.vreg['controllers'].select('view', self.request())
-        pd = ctrl._cw.parse_datetime
-        self.assertIsInstance(pd('2006/06/24 12:18'), datetime)
-        self.assertIsInstance(pd('2006/06/24'), date)
-        self.assertIsInstance(pd('2006/06/24 12:18', 'Datetime'), datetime)
-        self.assertIsInstance(pd('2006/06/24', 'Datetime'), datetime)
-        self.assertIsInstance(pd('2006/06/24', 'Date'), date)
-        self.assertIsInstance(pd('12:18', 'Time'), time)
+        with self.admin_access.web_request() as req:
+            ctrl = self.vreg['controllers'].select('view', req)
+            pd = ctrl._cw.parse_datetime
+            self.assertIsInstance(pd('2006/06/24 12:18'), datetime)
+            self.assertIsInstance(pd('2006/06/24'), date)
+            self.assertIsInstance(pd('2006/06/24 12:18', 'Datetime'), datetime)
+            self.assertIsInstance(pd('2006/06/24', 'Datetime'), datetime)
+            self.assertIsInstance(pd('2006/06/24', 'Date'), date)
+            self.assertIsInstance(pd('12:18', 'Time'), time)
 
     def test_parse_datetime_ko(self):
-        ctrl = self.vreg['controllers'].select('view', self.request())
-        pd = ctrl._cw.parse_datetime
-        self.assertRaises(ValueError,
-                          pd, '2006/06/24 12:188', 'Datetime')
-        self.assertRaises(ValueError,
-                          pd, '2006/06/240', 'Datetime')
-        self.assertRaises(ValueError,
-                          pd, '2006/06/24 12:18', 'Date')
-        self.assertRaises(ValueError,
-                          pd, '2006/24/06', 'Date')
-        self.assertRaises(ValueError,
-                          pd, '2006/06/240', 'Date')
-        self.assertRaises(ValueError,
-                          pd, '12:188', 'Time')
+        with self.admin_access.web_request() as req:
+            ctrl = self.vreg['controllers'].select('view', req)
+            pd = ctrl._cw.parse_datetime
+            self.assertRaises(ValueError,
+                              pd, '2006/06/24 12:188', 'Datetime')
+            self.assertRaises(ValueError,
+                              pd, '2006/06/240', 'Datetime')
+            self.assertRaises(ValueError,
+                              pd, '2006/06/24 12:18', 'Date')
+            self.assertRaises(ValueError,
+                              pd, '2006/24/06', 'Date')
+            self.assertRaises(ValueError,
+                              pd, '2006/06/240', 'Date')
+            self.assertRaises(ValueError,
+                              pd, '12:188', 'Time')
 
 if __name__ == '__main__':
     unittest_main()
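The parse_datetime helper exercised above lives on the request object (a controller's _cw attribute is the request itself) and dispatches on the target yams type given as second argument. A short sketch mirroring the assertions in the hunk, under the same test-environment assumptions as before:

    from datetime import datetime, date
    from cubicweb.devtools.testlib import CubicWebTC

    class ParseDatetimeSketchTC(CubicWebTC):
        def test_formats(self):
            with self.admin_access.web_request() as req:
                # without a type hint, the shape of the input decides
                self.assertIsInstance(req.parse_datetime('2006/06/24 12:18'), datetime)
                self.assertIsInstance(req.parse_datetime('2006/06/24'), date)
                # with a hint, the value is coerced to that type...
                self.assertIsInstance(req.parse_datetime('2006/06/24', 'Datetime'), datetime)
                # ...and inputs that do not fit the type raise ValueError
                self.assertRaises(ValueError, req.parse_datetime, '12:188', 'Time')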
diff -r 84738d495ffd -r 793377697c81 web/test/unittest_facet.py
--- a/web/test/unittest_facet.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/web/test/unittest_facet.py	Wed Sep 24 18:04:30 2014 +0200
@@ -4,21 +4,20 @@
 
 class BaseFacetTC(CubicWebTC):
 
-    def prepare_rqlst(self, rql='CWUser X', mainvar='X',
+    def prepare_rqlst(self, req, rql='CWUser X', mainvar='X',
                       expected_baserql='Any X WHERE X is CWUser',
                       expected_preparedrql='DISTINCT Any WHERE X is CWUser'):
-        req = self.request()
-        rset = self.execute(rql)
+        rset = req.cnx.execute(rql)
         rqlst = rset.syntax_tree().copy()
         filtered_variable, baserql = facet.init_facets(rset, rqlst.children[0],
                                                        mainvar=mainvar)
         self.assertEqual(filtered_variable.name, mainvar)
         self.assertEqual(baserql, expected_baserql)
         self.assertEqual(rqlst.as_string(), expected_preparedrql)
-        return req, rset, rqlst, filtered_variable
+        return rset, rqlst, filtered_variable
 
-    def _in_group_facet(self, cls=facet.RelationFacet, no_relation=False):
-        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
+    def _in_group_facet(self, req, cls=facet.RelationFacet, no_relation=False):
+        rset, rqlst, filtered_variable = self.prepare_rqlst(req)
         cls.no_relation = no_relation
         f = cls(req, rset=rset, select=rqlst.children[0],
                 filtered_variable=filtered_variable)
@@ -26,285 +25,328 @@
         f.rtype = 'in_group'
         f.role = 'subject'
         f.target_attr = 'name'
-        guests, managers = [eid for eid, in self.execute('CWGroup G ORDERBY GN '
-                                                         'WHERE G name GN, G name IN ("guests", "managers")')]
-        groups = [eid for eid, in self.execute('CWGroup G ORDERBY GN '
-                                               'WHERE G name GN, G name IN ("guests", "managers")')]
+        guests, managers = [eid for eid, in req.cnx.execute('CWGroup G ORDERBY GN '
+                                                            'WHERE G name GN, G name IN ("guests", "managers")')]
+        groups = [eid for eid, in req.cnx.execute('CWGroup G ORDERBY GN '
+                                                  'WHERE G name GN, G name IN ("guests", "managers")')]
         return f, groups
 
     def test_relation_simple(self):
-        f, (guests, managers) = self._in_group_facet()
-        self.assertEqual(f.vocabulary(),
-                         [(u'guests', guests), (u'managers', managers)])
-        # ensure rqlst is left unmodified
-        self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        #rqlst = rset.syntax_tree()
-        self.assertEqual(f.possible_values(),
-                         [str(guests), str(managers)])
-        # ensure rqlst is left unmodified
-        self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        f._cw.form[f.__regid__] = str(guests)
-        f.add_rql_restrictions()
-        # selection is cluttered because rqlst has been prepared for facet (it
-        # is not in real life)
-        self.assertEqual(f.select.as_string(),
-                         'DISTINCT Any WHERE X is CWUser, X in_group D, D eid %s' % guests)
+        with self.admin_access.web_request() as req:
+            f, (guests, managers) = self._in_group_facet(req)
+            self.assertEqual(f.vocabulary(),
+                             [(u'guests', guests), (u'managers', managers)])
+            # ensure rqlst is left unmodified
+            self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            #rqlst = rset.syntax_tree()
+            self.assertEqual(f.possible_values(),
+                             [str(guests), str(managers)])
+            # ensure rqlst is left unmodified
+            self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            f._cw.form[f.__regid__] = str(guests)
+            f.add_rql_restrictions()
+            # selection is cluttered because rqlst has been prepared for facet (it
+            # is not in real life)
+            self.assertEqual(f.select.as_string(),
+                             'DISTINCT Any WHERE X is CWUser, X in_group D, D eid %s' % guests)
 
     def test_relation_multiple_and(self):
-        f, (guests, managers) = self._in_group_facet()
-        f._cw.form[f.__regid__] = [str(guests), str(managers)]
-        f._cw.form[f.__regid__ + '_andor'] = 'AND'
-        f.add_rql_restrictions()
-        self.assertEqual(f.select.as_string(),
-                         'DISTINCT Any WHERE X is CWUser, X in_group A, B eid %s, X in_group B, A eid %s' % (guests, managers))
+        with self.admin_access.web_request() as req:
+            f, (guests, managers) = self._in_group_facet(req)
+            f._cw.form[f.__regid__] = [str(guests), str(managers)]
+            f._cw.form[f.__regid__ + '_andor'] = 'AND'
+            f.add_rql_restrictions()
+            self.assertEqual(f.select.as_string(),
+                             'DISTINCT Any WHERE X is CWUser, X in_group A, B eid %s, X in_group B, A eid %s' % (guests, managers))
 
     def test_relation_multiple_or(self):
-        f, (guests, managers) = self._in_group_facet()
-        f._cw.form[f.__regid__] = [str(guests), str(managers)]
-        f._cw.form[f.__regid__ + '_andor'] = 'OR'
-        f.add_rql_restrictions()
-        self.assertEqual(f.select.as_string(),
-                         'DISTINCT Any WHERE X is CWUser, X in_group A, A eid IN(%s, %s)' % (guests, managers))
+        with self.admin_access.web_request() as req:
+            f, (guests, managers) = self._in_group_facet(req)
+            f._cw.form[f.__regid__] = [str(guests), str(managers)]
+            f._cw.form[f.__regid__ + '_andor'] = 'OR'
+            f.add_rql_restrictions()
+            self.assertEqual(f.select.as_string(),
+                             'DISTINCT Any WHERE X is CWUser, X in_group A, A eid IN(%s, %s)' % (guests, managers))
 
     def test_relation_optional_rel(self):
-        req = self.request()
-        rset = self.execute('Any X,GROUP_CONCAT(GN) GROUPBY X '
-                            'WHERE X in_group G?, G name GN, NOT G name "users"')
-        rqlst = rset.syntax_tree().copy()
-        select = rqlst.children[0]
-        filtered_variable, baserql = facet.init_facets(rset, select)
+        with self.admin_access.web_request() as req:
+            rset = req.cnx.execute('Any X,GROUP_CONCAT(GN) GROUPBY X '
+                                   'WHERE X in_group G?, G name GN, NOT G name "users"')
+            rqlst = rset.syntax_tree().copy()
+            select = rqlst.children[0]
+            filtered_variable, baserql = facet.init_facets(rset, select)
 
-        f = facet.RelationFacet(req, rset=rset,
-                                select=select,
-                                filtered_variable=filtered_variable)
-        f.rtype = 'in_group'
-        f.role = 'subject'
-        f.target_attr = 'name'
-        guests, managers = [eid for eid, in self.execute('CWGroup G ORDERBY GN '
-                                                         'WHERE G name GN, G name IN ("guests", "managers")')]
-        self.assertEqual(f.vocabulary(),
-                         [(u'guests', guests), (u'managers', managers)])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users"')
-        #rqlst = rset.syntax_tree()
-        self.assertEqual(sorted(f.possible_values()),
-                         [str(guests), str(managers)])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users"')
-        req.form[f.__regid__] = str(guests)
-        f.add_rql_restrictions()
-        # selection is cluttered because rqlst has been prepared for facet (it
-        # is not in real life)
-        self.assertEqual(f.select.as_string(),
-                         'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users", X in_group D, D eid %s' % guests)
+            f = facet.RelationFacet(req, rset=rset,
+                                    select=select,
+                                    filtered_variable=filtered_variable)
+            f.rtype = 'in_group'
+            f.role = 'subject'
+            f.target_attr = 'name'
+            guests, managers = [eid for eid, in req.cnx.execute('CWGroup G ORDERBY GN '
+                                                                'WHERE G name GN, G name IN ("guests", "managers")')]
+            self.assertEqual(f.vocabulary(),
+                             [(u'guests', guests), (u'managers', managers)])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), "DISTINCT Any WHERE X in_group G?, G name GN, NOT G name 'users'")
+            #rqlst = rset.syntax_tree()
+            self.assertEqual(sorted(f.possible_values()),
+                             [str(guests), str(managers)])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), "DISTINCT Any WHERE X in_group G?, G name GN, NOT G name 'users'")
+            req.form[f.__regid__] = str(guests)
+            f.add_rql_restrictions()
+            # selection is cluttered because rqlst has been prepared for facet (it
+            # is not in real life)
+            self.assertEqual(f.select.as_string(),
+                             "DISTINCT Any WHERE X in_group G?, G name GN, NOT G name 'users', X in_group D, D eid %s" % guests)
 
     def test_relation_no_relation_1(self):
-        f, (guests, managers) = self._in_group_facet(no_relation=True)
-        self.assertEqual(f.vocabulary(),
-                         [(u'guests', guests), (u'managers', managers)])
-        self.assertEqual(f.possible_values(),
-                         [str(guests), str(managers)])
-        f._cw.create_entity('CWUser', login=u'hop', upassword='toto')
-        self.assertEqual(f.vocabulary(),
-                         [(u'', ''), (u'guests', guests), (u'managers', managers)])
-        self.assertEqual(f.possible_values(),
-                         [str(guests), str(managers), ''])
-        f._cw.form[f.__regid__] = ''
-        f.add_rql_restrictions()
-        self.assertEqual(f.select.as_string(),
-                         'DISTINCT Any WHERE X is CWUser, NOT X in_group G')
+        with self.admin_access.web_request() as req:
+            f, (guests, managers) = self._in_group_facet(req, no_relation=True)
+            self.assertEqual(f.vocabulary(),
+                             [(u'guests', guests), (u'managers', managers)])
+            self.assertEqual(f.possible_values(),
+                             [str(guests), str(managers)])
+            f._cw.create_entity('CWUser', login=u'hop', upassword='toto')
+            self.assertEqual(f.vocabulary(),
+                             [(u'', ''), (u'guests', guests), (u'managers', managers)])
+            self.assertEqual(f.possible_values(),
+                             [str(guests), str(managers), ''])
+            f._cw.form[f.__regid__] = ''
+            f.add_rql_restrictions()
+            self.assertEqual(f.select.as_string(),
                             'DISTINCT Any WHERE X is CWUser, NOT X in_group G')
 
     def test_relation_no_relation_2(self):
-        f, (guests, managers) = self._in_group_facet(no_relation=True)
-        f._cw.form[f.__regid__] = ['', guests]
-        f.select.save_state()
-        f.add_rql_restrictions()
-        self.assertEqual(f.select.as_string(),
-                         'DISTINCT Any WHERE X is CWUser, (NOT X in_group B) OR (X in_group A, A eid %s)' % guests)
-        f.select.recover()
-        self.assertEqual(f.select.as_string(),
-                         'DISTINCT Any WHERE X is CWUser')
+        with self.admin_access.web_request() as req:
+            f, (guests, managers) = self._in_group_facet(req, no_relation=True)
+            f._cw.form[f.__regid__] = ['', guests]
+            f.select.save_state()
+            f.add_rql_restrictions()
+            self.assertEqual(f.select.as_string(),
+                             'DISTINCT Any WHERE X is CWUser, (NOT X in_group B) OR (X in_group A, A eid %s)' % guests)
+            f.select.recover()
+            self.assertEqual(f.select.as_string(),
+                             'DISTINCT Any WHERE X is CWUser')
 
     def test_relationattribute(self):
-        f, (guests, managers) = self._in_group_facet(cls=facet.RelationAttributeFacet)
-        self.assertEqual(f.vocabulary(),
-                         [(u'guests', u'guests'), (u'managers', u'managers')])
-        # ensure rqlst is left unmodified
-        self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        #rqlst = rset.syntax_tree()
-        self.assertEqual(f.possible_values(),
-                         ['guests', 'managers'])
-        # ensure rqlst is left unmodified
-        self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        f._cw.form[f.__regid__] = 'guests'
-        f.add_rql_restrictions()
-        # selection is cluttered because rqlst has been prepared for facet (it
-        # is not in real life)
-        self.assertEqual(f.select.as_string(),
-                         "DISTINCT Any WHERE X is CWUser, X in_group E, E name 'guests'")
+        with self.admin_access.web_request() as req:
+            f, (guests, managers) = self._in_group_facet(req, cls=facet.RelationAttributeFacet)
+            self.assertEqual(f.vocabulary(),
+                             [(u'guests', u'guests'), (u'managers', u'managers')])
+            # ensure rqlst is left unmodified
+            self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            #rqlst = rset.syntax_tree()
+            self.assertEqual(f.possible_values(),
+                             ['guests', 'managers'])
+            # ensure rqlst is left unmodified
+            self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            f._cw.form[f.__regid__] = 'guests'
+            f.add_rql_restrictions()
+            # selection is cluttered because rqlst has been prepared for facet (it
+            # is not in real life)
+            self.assertEqual(f.select.as_string(),
+                             "DISTINCT Any WHERE X is CWUser, X in_group E, E name 'guests'")
 
     def test_daterange(self):
-        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
-        f = facet.DateRangeFacet(req, rset=rset,
-                                 select=rqlst.children[0],
-                                 filtered_variable=filtered_variable)
-        f.rtype = 'creation_date'
-        mind, maxd = self.execute('Any MIN(CD), MAX(CD) WHERE X is CWUser, X creation_date CD')[0]
-        self.assertEqual(f.vocabulary(),
-                         [(str(mind), mind),
-                          (str(maxd), maxd)])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        #rqlst = rset.syntax_tree()
-        self.assertEqual(f.possible_values(),
-                         [str(mind), str(maxd)])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        req.form['%s_inf' % f.__regid__] = str(datetime2ticks(mind))
-        req.form['%s_sup' % f.__regid__] = str(datetime2ticks(mind))
-        f.add_rql_restrictions()
-        # selection is cluttered because rqlst has been prepared for facet (it
-        # is not in real life)
-        self.assertEqual(f.select.as_string(),
-                         'DISTINCT Any WHERE X is CWUser, X creation_date >= "%s", '
-                         'X creation_date <= "%s"'
-                         % (mind.strftime('%Y/%m/%d'),
-                            mind.strftime('%Y/%m/%d')))
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepare_rqlst(req)
+            f = facet.DateRangeFacet(req, rset=rset,
+                                     select=rqlst.children[0],
+                                     filtered_variable=filtered_variable)
+            f.rtype = 'creation_date'
+            mind, maxd = req.cnx.execute('Any MIN(CD), MAX(CD) WHERE X is CWUser, X creation_date CD')[0]
+            self.assertEqual(f.vocabulary(),
+                             [(str(mind), mind),
+                              (str(maxd), maxd)])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            #rqlst = rset.syntax_tree()
+            self.assertEqual(f.possible_values(),
+                             [str(mind), str(maxd)])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            req.form['%s_inf' % f.__regid__] = str(datetime2ticks(mind))
+            req.form['%s_sup' % f.__regid__] = str(datetime2ticks(mind))
+            f.add_rql_restrictions()
+            # selection is cluttered because rqlst has been prepared for facet (it
+            # is not in real life)
+            self.assertEqual(f.select.as_string(),
+                             'DISTINCT Any WHERE X is CWUser, X creation_date >= "%s", '
+                             'X creation_date <= "%s"'
+                             % (mind.strftime('%Y/%m/%d'),
+                                mind.strftime('%Y/%m/%d')))
 
     def test_attribute(self):
-        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
-        f = facet.AttributeFacet(req, rset=rset,
-                                 select=rqlst.children[0],
-                                 filtered_variable=filtered_variable)
-        f.rtype = 'login'
-        self.assertEqual(f.vocabulary(),
-                         [(u'admin', u'admin'), (u'anon', u'anon')])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        #rqlst = rset.syntax_tree()
-        self.assertEqual(f.possible_values(),
-                         ['admin', 'anon'])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        req.form[f.__regid__] = 'admin'
-        f.add_rql_restrictions()
-        # selection is cluttered because rqlst has been prepared for facet (it
-        # is not in real life)
-        self.assertEqual(f.select.as_string(),
-                         "DISTINCT Any WHERE X is CWUser, X login 'admin'")
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepare_rqlst(req)
+            f = facet.AttributeFacet(req, rset=rset,
+                                     select=rqlst.children[0],
+                                     filtered_variable=filtered_variable)
+            f.rtype = 'login'
+            self.assertEqual(f.vocabulary(),
+                             [(u'admin', u'admin'), (u'anon', u'anon')])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            #rqlst = rset.syntax_tree()
+            self.assertEqual(f.possible_values(),
+                             ['admin', 'anon'])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            req.form[f.__regid__] = 'admin'
+            f.add_rql_restrictions()
+            # selection is cluttered because rqlst has been prepared for facet (it
+            # is not in real life)
+            self.assertEqual(f.select.as_string(),
+                             "DISTINCT Any WHERE X is CWUser, X login 'admin'")
 
     def test_bitfield(self):
-        req, rset, rqlst, filtered_variable = self.prepare_rqlst(
-            'CWAttribute X WHERE X ordernum XO',
-            expected_baserql='Any X WHERE X ordernum XO, X is CWAttribute',
-            expected_preparedrql='DISTINCT Any WHERE X ordernum XO, X is CWAttribute')
-        f = facet.BitFieldFacet(req, rset=rset,
-                                select=rqlst.children[0],
-                                filtered_variable=filtered_variable)
-        f.choices = [('un', 1,), ('deux', 2,)]
-        f.rtype = 'ordernum'
-        self.assertEqual(f.vocabulary(),
-                         [(u'deux', 2), (u'un', 1)])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X ordernum XO, X is CWAttribute')
-        #rqlst = rset.syntax_tree()
-        self.assertEqual(f.possible_values(),
-                         ['2', '1'])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X ordernum XO, X is CWAttribute')
-        req.form[f.__regid__] = '3'
-        f.add_rql_restrictions()
-        # selection is cluttered because rqlst has been prepared for facet (it
-        # is not in real life)
-        self.assertEqual(f.select.as_string(),
-                         "DISTINCT Any WHERE X ordernum XO, X is CWAttribute, X ordernum C HAVING 3 = (C & 3)")
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepare_rqlst(req,
+                'CWAttribute X WHERE X ordernum XO',
+                expected_baserql='Any X WHERE X ordernum XO, X is CWAttribute',
+                expected_preparedrql='DISTINCT Any WHERE X ordernum XO, X is CWAttribute')
+            f = facet.BitFieldFacet(req, rset=rset,
+                                    select=rqlst.children[0],
+                                    filtered_variable=filtered_variable)
+            f.choices = [('un', 1,), ('deux', 2,)]
+            f.rtype = 'ordernum'
+            self.assertEqual(f.vocabulary(),
+                             [(u'deux', 2), (u'un', 1)])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X ordernum XO, X is CWAttribute')
+            #rqlst = rset.syntax_tree()
+            self.assertEqual(f.possible_values(),
+                             ['2', '1'])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X ordernum XO, X is CWAttribute')
+            req.form[f.__regid__] = '3'
+            f.add_rql_restrictions()
+            # selection is cluttered because rqlst has been prepared for facet (it
+            # is not in real life)
+            self.assertEqual(f.select.as_string(),
+                             "DISTINCT Any WHERE X ordernum XO, X is CWAttribute, X ordernum C HAVING 3 = (C & 3)")
 
     def test_bitfield_0_value(self):
-        req, rset, rqlst, filtered_variable = self.prepare_rqlst(
-            'CWAttribute X WHERE X ordernum XO',
-            expected_baserql='Any X WHERE X ordernum XO, X is CWAttribute',
-            expected_preparedrql='DISTINCT Any WHERE X ordernum XO, X is CWAttribute')
-        f = facet.BitFieldFacet(req, rset=rset,
-                                select=rqlst.children[0],
-                                filtered_variable=filtered_variable)
-        f.choices = [('zero', 0,), ('un', 1,), ('deux', 2,)]
-        f.rtype = 'ordernum'
-        self.assertEqual(f.vocabulary(),
-                         [(u'deux', 2), (u'un', 1), (u'zero', 0)])
-        self.assertEqual(f.possible_values(),
-                         ['2', '1', '0'])
-        req.form[f.__regid__] = '0'
-        f.add_rql_restrictions()
-        self.assertEqual(f.select.as_string(),
-                         "DISTINCT Any WHERE X ordernum XO, X is CWAttribute, X ordernum C HAVING 0 = C")
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepare_rqlst(req,
                'CWAttribute X WHERE X ordernum XO',
+                expected_baserql='Any X WHERE X ordernum XO, X is CWAttribute',
+                expected_preparedrql='DISTINCT Any WHERE X ordernum XO, X is CWAttribute')
+            f = facet.BitFieldFacet(req, rset=rset,
+                                    select=rqlst.children[0],
+                                    filtered_variable=filtered_variable)
+            f.choices = [('zero', 0,), ('un', 1,), ('deux', 2,)]
+            f.rtype = 'ordernum'
+            self.assertEqual(f.vocabulary(),
+                             [(u'deux', 2), (u'un', 1), (u'zero', 0)])
+            self.assertEqual(f.possible_values(),
+                             ['2', '1', '0'])
+            req.form[f.__regid__] = '0'
+            f.add_rql_restrictions()
+            self.assertEqual(f.select.as_string(),
+                             "DISTINCT Any WHERE X ordernum XO, X is CWAttribute, X ordernum C HAVING 0 = C")
 
     def test_rql_path_eid(self):
-        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
-        class RPF(facet.RQLPathFacet):
-            path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
-            filter_variable = 'O'
-            label_variable = 'OL'
-        f = RPF(req, rset=rset, select=rqlst.children[0],
-                filtered_variable=filtered_variable)
-        self.assertEqual(f.vocabulary(), [(u'admin', self.user().eid),])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        #rqlst = rset.syntax_tree()
-        self.assertEqual(f.possible_values(),
-                         [str(self.user().eid),])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        req.form[f.__regid__] = '1'
-        f.add_rql_restrictions()
-        # selection is cluttered because rqlst has been prepared for facet (it
-        # is not in real life)
-        self.assertEqual(f.select.as_string(),
-                         "DISTINCT Any WHERE X is CWUser, X created_by F, F owned_by G, G eid 1")
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepare_rqlst(req)
+            class RPF(facet.RQLPathFacet):
+                path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
+                filter_variable = 'O'
+                label_variable = 'OL'
+            f = RPF(req, rset=rset, select=rqlst.children[0],
+                    filtered_variable=filtered_variable)
+            self.assertEqual(f.vocabulary(), [(u'admin', req.user.eid),])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            #rqlst = rset.syntax_tree()
+            self.assertEqual(f.possible_values(),
+                             [str(req.user.eid),])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            req.form[f.__regid__] = '1'
+            f.add_rql_restrictions()
+            # selection is cluttered because rqlst has been prepared for facet (it
+            # is not in real life)
+            self.assertEqual(f.select.as_string(),
+                             "DISTINCT Any WHERE X is CWUser, X created_by F, F owned_by G, G eid 1")
 
     def test_rql_path_eid_no_label(self):
-        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
-        class RPF(facet.RQLPathFacet):
-            path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
-            filter_variable = 'O'
-        f = RPF(req, rset=rset, select=rqlst.children[0],
-                filtered_variable=filtered_variable)
-        self.assertEqual(f.vocabulary(), [(str(self.user().eid), self.user().eid),])
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepare_rqlst(req)
+            class RPF(facet.RQLPathFacet):
+                path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
+                filter_variable = 'O'
+            f = RPF(req, rset=rset, select=rqlst.children[0],
+                    filtered_variable=filtered_variable)
+            self.assertEqual(f.vocabulary(), [(str(req.user.eid), req.user.eid),])
 
     def test_rql_path_attr(self):
-        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
-        class RPF(facet.RQLPathFacet):
-            path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
-            filter_variable = 'OL'
-        f = RPF(req, rset=rset, select=rqlst.children[0],
-                filtered_variable=filtered_variable)
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepare_rqlst(req)
+            class RPF(facet.RQLPathFacet):
+                path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
+                filter_variable = 'OL'
+            f = RPF(req, rset=rset, select=rqlst.children[0],
+                    filtered_variable=filtered_variable)
 
-        self.assertEqual(f.vocabulary(), [(u'admin', 'admin'),])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        self.assertEqual(f.possible_values(), ['admin',])
-        # ensure rqlst is left unmodified
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
-        req.form[f.__regid__] = 'admin'
-        f.add_rql_restrictions()
-        # selection is cluttered because rqlst has been prepared for facet (it
-        # is not in real life)
-        self.assertEqual(f.select.as_string(),
-                         "DISTINCT Any WHERE X is CWUser, X created_by G, G owned_by H, H login 'admin'")
+            self.assertEqual(f.vocabulary(), [(u'admin', 'admin'),])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            self.assertEqual(f.possible_values(), ['admin',])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            req.form[f.__regid__] = 'admin'
+            f.add_rql_restrictions()
+            # selection is cluttered because rqlst has been prepared for facet (it
+            # is not in real life)
+            self.assertEqual(f.select.as_string(),
+                             "DISTINCT Any WHERE X is CWUser, X created_by G, G owned_by H, H login 'admin'")
 
     def test_rql_path_check_filter_label_variable(self):
-        req, rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst()
-        class RPF(facet.RQLPathFacet):
-            path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
-            filter_variable = 'OL'
-            label_variable = 'OL'
-        self.assertRaises(AssertionError, RPF, req, rset=rset,
-                          select=rqlst.children[0],
-                          filtered_variable=filtered_variable)
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req)
+            class RPF(facet.RQLPathFacet):
+                path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
+                filter_variable = 'OL'
+                label_variable = 'OL'
+            self.assertRaises(AssertionError, RPF, req, rset=rset,
+                              select=rqlst.children[0],
+                              filtered_variable=filtered_variable)
+
-    def prepareg_aggregat_rqlst(self):
-        return self.prepare_rqlst(
+    def test_rqlpath_range(self):
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepare_rqlst(req)
+            class RRF(facet.DateRangeRQLPathFacet):
+                path = [('X created_by U'), ('U owned_by O'), ('O creation_date OL')]
+                filter_variable = 'OL'
+            f = RRF(req, rset=rset, select=rqlst.children[0],
+                    filtered_variable=filtered_variable)
+            mind, maxd = req.cnx.execute('Any MIN(CD), MAX(CD) WHERE X is CWUser, X created_by U, U owned_by O, O creation_date CD')[0]
+            self.assertEqual(f.vocabulary(), [(str(mind), mind),
+                                              (str(maxd), maxd)])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            self.assertEqual(f.possible_values(),
+                             [str(mind), str(maxd)])
+            # ensure rqlst is left unmodified
+            self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
+            req.form['%s_inf' % f.__regid__] = str(datetime2ticks(mind))
+            req.form['%s_sup' % f.__regid__] = str(datetime2ticks(mind))
+            f.add_rql_restrictions()
+            # selection is cluttered because rqlst has been prepared for facet (it
+            # is not in real life)
+            self.assertEqual(f.select.as_string(),
+                             'DISTINCT Any WHERE X is CWUser, X created_by G, G owned_by H, H creation_date >= "%s", '
+                             'H creation_date <= "%s"'
+                             % (mind.strftime('%Y/%m/%d'),
+                                mind.strftime('%Y/%m/%d')))
+
+    def prepareg_aggregat_rqlst(self, req):
+        return self.prepare_rqlst(req,
             'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, '
             'X modification_date XM, Y creation_date YD, Y is CWGroup '
             'HAVING DAY(XD)>=DAY(YD) AND DAY(XM)<=DAY(YD)', 'X',
@@ -317,47 +359,50 @@
 
     def test_aggregat_query_cleanup_select(self):
-        req, rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst()
-        select = rqlst.children[0]
-        facet.cleanup_select(select, filtered_variable=filtered_variable)
-        self.assertEqual(select.as_string(),
-                         'DISTINCT Any WHERE X is CWUser, X creation_date XD, '
-                         'X modification_date XM, Y creation_date YD, Y is CWGroup '
-                         'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)')
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req)
+            select = rqlst.children[0]
+            facet.cleanup_select(select, filtered_variable=filtered_variable)
+            self.assertEqual(select.as_string(),
+                             'DISTINCT Any WHERE X is CWUser, X creation_date XD, '
+                             'X modification_date XM, Y creation_date YD, Y is CWGroup '
+                             'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)')
 
     def test_aggregat_query_rql_path(self):
-        req, rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst()
-        class RPF(facet.RQLPathFacet):
-            path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
-            filter_variable = 'OL'
-        f = RPF(req, rset=rset, select=rqlst.children[0],
-                filtered_variable=filtered_variable)
-        self.assertEqual(f.vocabulary(), [(u'admin', u'admin')])
-        self.assertEqual(f.possible_values(), ['admin'])
-        req.form[f.__regid__] = 'admin'
-        f.add_rql_restrictions()
-        self.assertEqual(f.select.as_string(),
-                         "DISTINCT Any WHERE X is CWUser, X creation_date XD, "
-                         "X modification_date XM, Y creation_date YD, Y is CWGroup, "
-                         "X created_by G, G owned_by H, H login 'admin' "
-                         "HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)")
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req)
+            class RPF(facet.RQLPathFacet):
+                path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
+                filter_variable = 'OL'
+            f = RPF(req, rset=rset, select=rqlst.children[0],
+                    filtered_variable=filtered_variable)
+            self.assertEqual(f.vocabulary(), [(u'admin', u'admin')])
+            self.assertEqual(f.possible_values(), ['admin'])
+            req.form[f.__regid__] = 'admin'
+            f.add_rql_restrictions()
+            self.assertEqual(f.select.as_string(),
+                             "DISTINCT Any WHERE X is CWUser, X creation_date XD, "
+                             "X modification_date XM, Y creation_date YD, Y is CWGroup, "
+                             "X created_by G, G owned_by H, H login 'admin' "
+                             "HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)")
 
     def test_aggregat_query_attribute(self):
-        req, rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst()
-        f = facet.AttributeFacet(req, rset=rset,
-                                 select=rqlst.children[0],
-                                 filtered_variable=filtered_variable)
-        f.rtype = 'login'
-        self.assertEqual(f.vocabulary(),
-                         [(u'admin', u'admin'), (u'anon', u'anon')])
-        self.assertEqual(f.possible_values(),
-                         ['admin', 'anon'])
-        req.form[f.__regid__] = 'admin'
-        f.add_rql_restrictions()
-        self.assertEqual(f.select.as_string(),
-                         "DISTINCT Any WHERE X is CWUser, X creation_date XD, "
-                         "X modification_date XM, Y creation_date YD, Y is CWGroup, X login 'admin' "
-                         "HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)")
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst(req)
+            f = facet.AttributeFacet(req, rset=rset,
+                                     select=rqlst.children[0],
+                                     filtered_variable=filtered_variable)
+            f.rtype = 'login'
+            self.assertEqual(f.vocabulary(),
+                             [(u'admin', u'admin'), (u'anon', u'anon')])
+            self.assertEqual(f.possible_values(),
+                             ['admin', 'anon'])
+            req.form[f.__regid__] = 'admin'
+            f.add_rql_restrictions()
+            self.assertEqual(f.select.as_string(),
+                             "DISTINCT Any WHERE X is CWUser, X creation_date XD, "
+                             "X modification_date XM, Y creation_date YD, Y is CWGroup, X login 'admin' "
+                             "HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)")
 
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
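The facet tests above always go through the same cycle: copy the result set's syntax tree, let facet.init_facets pick the filtered variable, instantiate a facet bound to that variable, then simulate a user choice through req.form before calling add_rql_restrictions. A standalone sketch of that cycle with a RelationFacet, assuming the facet module is cubicweb.web.facet as these tests use it (class name illustrative):

    from cubicweb.devtools.testlib import CubicWebTC
    from cubicweb.web import facet

    class InGroupFacetSketchTC(CubicWebTC):
        def test_restriction_roundtrip(self):
            with self.admin_access.web_request() as req:
                rset = req.cnx.execute('CWUser X')
                select = rset.syntax_tree().copy().children[0]
                # init_facets prepares the select tree and returns the
                # variable the facets will filter on ('X' here)
                filtered_variable, baserql = facet.init_facets(rset, select)
                f = facet.RelationFacet(req, rset=rset, select=select,
                                        filtered_variable=filtered_variable)
                f.rtype, f.role, f.target_attr = 'in_group', 'subject', 'name'
                # simulate the user picking the 'guests' group in the facet
                # box, then fold that choice back into the RQL selection
                guests = req.cnx.execute('CWGroup G WHERE G name "guests"')[0][0]
                req.form[f.__regid__] = str(guests)
                f.add_rql_restrictions()
                self.assertIn('X in_group', f.select.as_string())

RQLPathFacet generalizes this to a multi-step path such as [('X created_by U'), ('U owned_by O'), ('O login OL')]; note that the tests above assert a path whose label_variable equals its filter_variable is rejected with an AssertionError.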
diff -r 84738d495ffd -r 793377697c81 web/test/unittest_form.py
--- a/web/test/unittest_form.py	Wed Sep 24 17:35:59 2014 +0200
+++ b/web/test/unittest_form.py	Wed Sep 24 18:04:30 2014 +0200
@@ -34,131 +34,138 @@
 
 class FieldsFormTC(CubicWebTC):
 
     def test_form_field_format(self):
-        form = FieldsForm(self.request(), None)
-        self.assertEqual(StringField().format(form), 'text/html')
-        self.execute('INSERT CWProperty X: X pkey "ui.default-text-format", X value "text/rest", X for_user U WHERE U login "admin"')
-        self.commit()
-        self.assertEqual(StringField().format(form), 'text/rest')
+        with self.admin_access.web_request() as req:
+            form = FieldsForm(req, None)
+            self.assertEqual(StringField().format(form), 'text/html')
+            req.cnx.execute('INSERT CWProperty X: X pkey "ui.default-text-format", X value "text/rest", X for_user U WHERE U login "admin"')
+            req.cnx.commit()
+            self.assertEqual(StringField().format(form), 'text/rest')
 
     def test_process_posted(self):
         class AForm(FieldsForm):
             anint = IntField()
             astring = StringField()
-        form = AForm(self.request(anint='1', astring='2', _cw_fields='anint,astring'))
-        self.assertEqual(form.process_posted(), {'anint': 1, 'astring': '2'})
-        form = AForm(self.request(anint='1a', astring='2b', _cw_fields='anint,astring'))
-        self.assertRaises(ValidationError, form.process_posted)
+        with self.admin_access.web_request(anint='1', astring='2', _cw_fields='anint,astring') as req:
+            form = AForm(req)
+            self.assertEqual(form.process_posted(), {'anint': 1, 'astring': '2'})
+        with self.admin_access.web_request(anint='1a', astring='2b', _cw_fields='anint,astring') as req:
+            form = AForm(req)
+            self.assertRaises(ValidationError, form.process_posted)
 
 class EntityFieldsFormTC(CubicWebTC):
 
-    def setUp(self):
-        super(EntityFieldsFormTC, self).setUp()
-        self.req = self.request()
-        self.entity = self.user(self.req)
+    def test_form_field_choices(self):
+        with self.admin_access.web_request() as req:
+            b = req.create_entity('BlogEntry', title=u'di mascii code', content=u'a best-seller')
+            t = req.create_entity('Tag', name=u'x')
+            form1 = self.vreg['forms'].select('edition', req, entity=t)
+            choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)]
+            self.assertIn(unicode(b.eid), choices)
+            form2 = self.vreg['forms'].select('edition', req, entity=b)
+            choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)]
+            self.assertIn(unicode(t.eid), choices)
 
-    def test_form_field_choices(self):
-        b = self.req.create_entity('BlogEntry', title=u'di mascii code', content=u'a best-seller')
-        t = self.req.create_entity('Tag', name=u'x')
-        form1 = self.vreg['forms'].select('edition', self.req, entity=t)
-        choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)]
-        self.assertIn(unicode(b.eid), choices)
-        form2 = self.vreg['forms'].select('edition', self.req, entity=b)
-        choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)]
-        self.assertIn(unicode(t.eid), choices)
+            b.cw_clear_all_caches()
+            t.cw_clear_all_caches()
+            req.cnx.execute('SET X tags Y WHERE X is Tag, Y is BlogEntry')
 
-        b.cw_clear_all_caches()
-        t.cw_clear_all_caches()
-        self.execute('SET X tags Y WHERE X is Tag, Y is BlogEntry')
-
-        choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)]
-        self.assertIn(unicode(b.eid), choices)
-        choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)]
-        self.assertIn(unicode(t.eid), choices)
+            choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)]
+            self.assertIn(unicode(b.eid), choices)
+            choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)]
+            self.assertIn(unicode(t.eid), choices)
 
     def test_form_field_choices_new_entity(self):
-        e = self.vreg['etypes'].etype_class('CWUser')(self.request())
-        form = self.vreg['forms'].select('edition', self.req, entity=e)
-        unrelated = [rview for rview, reid in form.field_by_name('in_group', 'subject').choices(form)]
-        # should be default groups but owners, i.e. managers, users, guests
-        self.assertEqual(unrelated, [u'guests', u'managers', u'users'])
+        with self.admin_access.web_request() as req:
+            e = self.vreg['etypes'].etype_class('CWUser')(req)
+            form = self.vreg['forms'].select('edition', req, entity=e)
+            unrelated = [rview for rview, reid in form.field_by_name('in_group', 'subject').choices(form)]
+            # should be default groups but owners, i.e. managers, users, guests
+            self.assertEqual(unrelated, [u'guests', u'managers', u'users'])
 
     def test_consider_req_form_params(self):
-        e = self.vreg['etypes'].etype_class('CWUser')(self.request())
-        e.eid = 'A'
-        form = EntityFieldsForm(self.request(login=u'toto'), None, entity=e)
-        field = StringField(name='login', role='subject', eidparam=True)
-        form.append_field(field)
-        form.build_context({})
-        self.assertEqual(field.widget.values(form, field), (u'toto',))
+        with self.admin_access.web_request() as req:
+            e = self.vreg['etypes'].etype_class('CWUser')(req)
+            e.eid = 'A'
+            with self.admin_access.web_request(login=u'toto') as toto_req:
+                form = EntityFieldsForm(toto_req, None, entity=e)
+                field = StringField(name='login', role='subject', eidparam=True)
+                form.append_field(field)
+                form.build_context({})
+                self.assertEqual(field.widget.values(form, field), (u'toto',))
 
     def test_linkto_field_duplication_inout(self):
-        e = self.vreg['etypes'].etype_class('CWUser')(self.request())
-        e.eid = 'A'
-        e._cw = self.req
-        geid = self.execute('CWGroup X WHERE X name "users"')[0][0]
-        self.req.form['__linkto'] = 'in_group:%s:subject' % geid
-        form = self.vreg['forms'].select('edition', self.req, entity=e)
-        form.content_type = 'text/html'
-        pageinfo = self._check_html(form.render(), form, template=None)
-        inputs = pageinfo.find_tag('select', False)
-        ok = False
-        for selectnode in pageinfo.matching_nodes('select', name='from_in_group-subject:A'):
-            for optionnode in selectnode:
-                self.assertEqual(optionnode.get('value'), str(geid))
-                self.assertEqual(ok, False)
-                ok = True
-        inputs = pageinfo.find_tag('input', False)
-        self.assertFalse(list(pageinfo.matching_nodes('input', name='__linkto')))
+        with self.admin_access.web_request() as req:
+            e = self.vreg['etypes'].etype_class('CWUser')(req)
+            e.eid = 'A'
+            e._cw = req
+            geid = req.cnx.execute('CWGroup X WHERE X name "users"')[0][0]
+            req.form['__linkto'] = 'in_group:%s:subject' % geid
+            form = self.vreg['forms'].select('edition', req, entity=e)
+            form.content_type = 'text/html'
+            pageinfo = self._check_html(form.render(), form, template=None)
+            inputs = pageinfo.find_tag('select', False)
+            ok = False
+            for selectnode in pageinfo.matching_nodes('select', name='from_in_group-subject:A'):
+                for optionnode in selectnode:
+                    self.assertEqual(optionnode.get('value'), str(geid))
+                    self.assertEqual(ok, False)
+                    ok = True
+            inputs = pageinfo.find_tag('input', False)
+            self.assertFalse(list(pageinfo.matching_nodes('input', name='__linkto')))
 
     def test_reledit_composite_field(self):
-        rset = self.execute('INSERT BlogEntry X: X title "cubicweb.org", X content "hop"')
-        form = self.vreg['views'].select('reledit', self.request(),
-                                         rset=rset, row=0, rtype='content')
-        data = form.render(row=0, rtype='content', formid='base', action='edit_rtype')
-        self.assertTrue('content_format' in data)
+        with self.admin_access.web_request() as req:
+            rset = req.execute('INSERT BlogEntry X: X title "cubicweb.org", X content "hop"')
+            form = self.vreg['views'].select('reledit', req,
+                                             rset=rset, row=0, rtype='content')
+            data = form.render(row=0, rtype='content', formid='base', action='edit_rtype')
+            self.assertIn('content_format', data)
 
     # form tests ##############################################################
 
     def test_form_inheritance(self):
-        class CustomChangeStateForm(ChangeStateForm):
-            hello = IntField(name='youlou')
-            creation_date = DateTimeField(widget=DateTimePicker)
-        form = CustomChangeStateForm(self.req, redirect_path='perdu.com',
-                                     entity=self.entity)
-        form.render(formvalues=dict(state=123, trcomment=u'',
-                                    trcomment_format=u'text/plain'))
+        with self.admin_access.web_request() as req:
+            class CustomChangeStateForm(ChangeStateForm):
+                hello = IntField(name='youlou')
+                creation_date = DateTimeField(widget=DateTimePicker)
+            form = CustomChangeStateForm(req, redirect_path='perdu.com',
                                         entity=req.user)
+            form.render(formvalues=dict(state=123, trcomment=u'',
+                                        trcomment_format=u'text/plain'))
 
     def test_change_state_form(self):
-        form = ChangeStateForm(self.req, redirect_path='perdu.com',
-                               entity=self.entity)
-        form.render(formvalues=dict(state=123, trcomment=u'',
-                                    trcomment_format=u'text/plain'))
+        with self.admin_access.web_request() as req:
+            form = ChangeStateForm(req, redirect_path='perdu.com',
                                   entity=req.user)
+            form.render(formvalues=dict(state=123, trcomment=u'',
+                                        trcomment_format=u'text/plain'))
 
     # fields tests ############################################################
 
-    def _render_entity_field(self, name, form):
+    def _render_entity_field(self, req, name, form):
         form.build_context({})
-        renderer = FormRenderer(self.req)
+        renderer = FormRenderer(req)
         return form.field_by_name(name, 'subject').render(form, renderer)
 
-    def _test_richtextfield(self, expected):
+    def _test_richtextfield(self, req, expected):
         class RTFForm(EntityFieldsForm):
             description = RichTextField(eidparam=True, role='subject')
-        state = self.vreg['etypes'].etype_class('State')(self.req)
+        state = self.vreg['etypes'].etype_class('State')(req)
         state.eid = 'S'
-        form = RTFForm(self.req, redirect_path='perdu.com', entity=state)
+        form = RTFForm(req, redirect_path='perdu.com', entity=state)
         # make it think it can use fck editor anyway
         form.field_by_name('description', 'subject').format = lambda form, field=None: 'text/html'
-        self.assertMultiLineEqual(self._render_entity_field('description', form),
+        self.assertMultiLineEqual(self._render_entity_field(req, 'description', form),
                                   expected % {'eid': state.eid})
 
     def test_richtextfield_1(self):
-        self.req.use_fckeditor = lambda: False
-        self._test_richtextfield('''
@@ -167,8 +174,9 @@
 
     def test_richtextfield_2(self):
-        self.req.use_fckeditor = lambda: True
-        self._test_richtextfield('')
+        with self.admin_access.web_request() as req:
+            req.use_fckeditor = lambda: True
+            self._test_richtextfield(req, '')
 
     def test_filefield(self):
@@ -179,10 +187,11 @@
                                encoding_field=StringField(name='data_encoding', max_length=20,
                                                           eidparam=True, role='subject'),
                                eidparam=True, role='subject')
-        file = self.req.create_entity('File', data_name=u"pouet.txt", data_encoding=u'UTF-8',
-                                      data=Binary('new widgets system'))
-        form = FFForm(self.req, redirect_path='perdu.com', entity=file)
-        self.assertMultiLineEqual(self._render_entity_field('data', form),
+        with self.admin_access.web_request() as req:
+            file = req.create_entity('File', data_name=u"pouet.txt", data_encoding=u'UTF-8',
+                                     data=Binary('new widgets system'))
+            form = FFForm(req, redirect_path='perdu.com', entity=file)
+            self.assertMultiLineEqual(self._render_entity_field(req, 'data', form),
 ''' show advanced fields