author | Julien Cristau <julien.cristau@logilab.fr> |
Mon, 13 Jan 2014 13:47:47 +0100 | |
changeset 9402 | 2c48c091b6a2 |
parent 9127 | aff75b69db92 (diff) |
parent 9401 | 17dc43c1f1bd (current diff) |
child 9403 | d81207fb9499 |
--- a/__init__.py Fri Jan 10 18:31:07 2014 +0100 +++ b/__init__.py Mon Jan 13 13:47:47 2014 +0100 @@ -242,3 +242,15 @@ errors[rname(*key)] = errors.pop(key) return ValidationError(getattr(entity, 'eid', entity), errors, substitutions, i18nvalues) + + +# exceptions ################################################################## + +class ProgrammingError(Exception): #DatabaseError): + """Exception raised for errors that are related to the database's operation + and not necessarily under the control of the programmer, e.g. an unexpected + disconnect occurs, the data source name is not found, a transaction could + not be processed, a memory allocation error occurred during processing, + etc. + """ +
--- a/dbapi.py Fri Jan 10 18:31:07 2014 +0100 +++ b/dbapi.py Mon Jan 13 13:47:47 2014 +0100 @@ -34,13 +34,13 @@ from urlparse import urlparse from logilab.common.logging_ext import set_log_methods -from logilab.common.decorators import monkeypatch +from logilab.common.decorators import monkeypatch, cachedproperty from logilab.common.deprecation import deprecated -from cubicweb import ETYPE_NAME_MAP, ConnectionError, AuthenticationError,\ - cwvreg, cwconfig +from cubicweb import (ETYPE_NAME_MAP, AuthenticationError, ProgrammingError, + cwvreg, cwconfig) +from cubicweb.repoapi import get_repository from cubicweb.req import RequestSessionBase -from cubicweb.utils import parse_repo_uri _MARKER = object() @@ -91,53 +91,9 @@ self.close_on_del = close -def _get_inmemory_repo(config, vreg=None): - from cubicweb.server.repository import Repository - from cubicweb.server.utils import TasksManager - return Repository(config, TasksManager(), vreg=vreg) - -def get_repository(uri=None, config=None, vreg=None): - """get a repository for the given URI or config/vregistry (in case we're - loading the repository for a client, eg web server, configuration). - - The returned repository may be an in-memory repository or a proxy object - using a specific RPC method, depending on the given URI (pyro or zmq). - """ - if uri is None: - return _get_inmemory_repo(config, vreg) - - protocol, hostport, appid = parse_repo_uri(uri) - - if protocol == 'inmemory': - # me may have been called with a dummy 'inmemory://' uri ... 
- return _get_inmemory_repo(config, vreg) - - if protocol == 'pyroloc': # direct connection to the instance - from logilab.common.pyro_ext import get_proxy - uri = uri.replace('pyroloc', 'PYRO') - return get_proxy(uri) - - if protocol == 'pyro': # connection mediated through the pyro ns - from logilab.common.pyro_ext import ns_get_proxy - path = appid.strip('/') - if not path: - raise ConnectionError( - "can't find instance name in %s (expected to be the path component)" - % uri) - if '.' in path: - nsgroup, nsid = path.rsplit('.', 1) - else: - nsgroup = 'cubicweb' - nsid = path - return ns_get_proxy(nsid, defaultnsgroup=nsgroup, nshost=hostport) - - if protocol.startswith('zmqpickle-'): - from cubicweb.zmqclient import ZMQRepositoryClient - return ZMQRepositoryClient(uri) - else: - raise ConnectionError('unknown protocol: `%s`' % protocol) +@deprecated('[4.0] the dbapi is deprecated. Have a look at the new repoapi.') def _repo_connect(repo, login, **kwargs): """Constructor to create a new connection to the given CubicWeb repository. 
@@ -327,17 +283,14 @@ else: # these args are initialized after a connection is # established - self.session = None + self.session = DBAPISession(None) self.cnx = self.user = _NeedAuthAccessMock() self.set_default_language(vreg) - def from_controller(self): - return 'view' - def get_option_value(self, option, foreid=None): return self.cnx.get_option_value(option, foreid) - def set_session(self, session, user=None): + def set_session(self, session): """method called by the session handler when the user is authenticated or an anonymous connection is open """ @@ -345,11 +298,8 @@ if session.cnx: self.cnx = session.cnx self.execute = session.cnx.cursor(self).execute - if user is None: - user = self.cnx.user(self) - if user is not None: - self.user = user - self.set_entity_cache(user) + self.user = self.cnx.user(self) + self.set_entity_cache(self.user) def execute(self, *args, **kwargs): # pylint: disable=E0202 """overriden when session is set. By default raise authentication error @@ -371,8 +321,8 @@ # server-side service call ################################################# - def call_service(self, regid, async=False, **kwargs): - return self.cnx.call_service(regid, async, **kwargs) + def call_service(self, regid, **kwargs): + return self.cnx.call_service(regid, **kwargs) # entities cache management ############################################### @@ -415,13 +365,6 @@ """return the definition of sources used by the repository.""" return self.cnx.source_defs() - @deprecated('[3.17] do not use hijack_user. 
create new Session object') - def hijack_user(self, user): - """return a fake request/session using specified user""" - req = DBAPIRequest(self.vreg) - req.set_session(self.session, user) - return req - # these are overridden by set_log_methods below # only defining here to prevent pylint from complaining info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None @@ -429,16 +372,6 @@ set_log_methods(DBAPIRequest, getLogger('cubicweb.dbapi')) -# exceptions ################################################################## - -class ProgrammingError(Exception): #DatabaseError): - """Exception raised for errors that are related to the database's operation - and not necessarily under the control of the programmer, e.g. an unexpected - disconnect occurs, the data source name is not found, a transaction could - not be processed, a memory allocation error occurred during processing, - etc. - """ - # cursor / connection objects ################################################## @@ -531,7 +464,6 @@ # make exceptions available through the connection object ProgrammingError = ProgrammingError # attributes that may be overriden per connection instance - anonymous_connection = False cursor_class = Cursor vreg = None _closed = None @@ -557,6 +489,13 @@ return False return isinstance(self._repo, Repository) + @property # could be a cached property but we want to prevent assigment to + # catch potential programming error. 
+ def anonymous_connection(self): + login = self._repo.user_info(self.sessionid)[1] + anon_login = self.vreg.config.get('anonymous-user') + return login == anon_login + def __repr__(self): if self.anonymous_connection: return '<Connection %s (anonymous)>' % self.sessionid @@ -583,8 +522,8 @@ # server-side service call ################################################# @check_not_closed - def call_service(self, regid, async=False, **kwargs): - return self._repo.call_service(self.sessionid, regid, async, **kwargs) + def call_service(self, regid, **kwargs): + return self._repo.call_service(self.sessionid, regid, **kwargs) # connection initialization methods ######################################## @@ -641,11 +580,11 @@ def request(self): if self._web_request: - from cubicweb.web.request import CubicWebRequestBase - req = CubicWebRequestBase(self.vreg, False) + from cubicweb.web.request import DBAPICubicWebRequestBase + req = DBAPICubicWebRequestBase(self.vreg, False) req.get_header = lambda x, default=None: default - req.set_session = lambda session, user=None: DBAPIRequest.set_session( - req, session, user) + req.set_session = lambda session: DBAPIRequest.set_session( + req, session) req.relative_path = lambda includeparams=True: '' else: req = DBAPIRequest(self.vreg)
--- a/devtools/__init__.py Fri Jan 10 18:31:07 2014 +0100 +++ b/devtools/__init__.py Mon Jan 13 13:47:47 2014 +0100 @@ -268,7 +268,6 @@ skip_db_create_and_restore = True read_instance_schema = True # read schema from database - # test database handling ####################################################### DEFAULT_EMPTY_DB_ID = '__default_empty_db__'
--- a/devtools/fake.py Fri Jan 10 18:31:07 2014 +0100 +++ b/devtools/fake.py Mon Jan 13 13:47:47 2014 +0100 @@ -24,7 +24,7 @@ from cubicweb.req import RequestSessionBase from cubicweb.cwvreg import CWRegistryStore -from cubicweb.web.request import CubicWebRequestBase +from cubicweb.web.request import ConnectionCubicWebRequestBase from cubicweb.devtools import BASE_URL, BaseApptestConfiguration @@ -53,7 +53,7 @@ return {'system': {'db-driver': 'sqlite'}} -class FakeRequest(CubicWebRequestBase): +class FakeRequest(ConnectionCubicWebRequestBase): """test implementation of an cubicweb request object""" def __init__(self, *args, **kwargs):
--- a/devtools/httptest.py Fri Jan 10 18:31:07 2014 +0100 +++ b/devtools/httptest.py Mon Jan 13 13:47:47 2014 +0100 @@ -104,7 +104,7 @@ reactor.addSystemEventTrigger('after', 'startup', semaphore.release) t = threading.Thread(target=safe_run, name='cubicweb_test_web_server', - args=(self.config, self.vreg, True)) + args=(self.config, True), kwargs={'repo': self.repo}) self.web_thread = t t.start() semaphore.acquire()
--- a/devtools/repotest.py Fri Jan 10 18:31:07 2014 +0100 +++ b/devtools/repotest.py Mon Jan 13 13:47:47 2014 +0100 @@ -262,8 +262,8 @@ u = self.repo._build_user(self.session, self.session.user.eid) u._groups = set(groups) s = Session(u, self.repo) - s._tx.cnxset = self.cnxset - s._tx.ctx_count = 1 + s._cnx.cnxset = self.cnxset + s._cnx.ctx_count = 1 # register session to ensure it gets closed self._dumb_sessions.append(s) return s @@ -311,8 +311,8 @@ del self.repo.sources_by_uri[source.uri] undo_monkey_patch() for session in self._dumb_sessions: - if session._tx.cnxset is not None: - session._tx.cnxset = None + if session._cnx.cnxset is not None: + session._cnx.cnxset = None session.close() def _prepare_plan(self, rql, kwargs=None): @@ -324,7 +324,8 @@ select.solutions.sort() else: rqlst.solutions.sort() - return self.o.plan_factory(rqlst, kwargs, self.session) + with self.session.ensure_cnx_set: + return self.o.plan_factory(rqlst, kwargs, self.session) # monkey patch some methods to get predicatable results #######################
--- a/devtools/test/unittest_testlib.py Fri Jan 10 18:31:07 2014 +0100 +++ b/devtools/test/unittest_testlib.py Mon Jan 13 13:47:47 2014 +0100 @@ -189,5 +189,35 @@ self.assertIn(AnAppobject, self.vreg['hip']['hop']) self.assertNotIn(AnAppobject, self.vreg['hip']['hop']) + +class RepoAccessTC(CubicWebTC): + def test_repo_connection(self): + acc = self.new_access('admin') + with acc.repo_cnx() as cnx: + rset = cnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + + def test_client_connection(self): + acc = self.new_access('admin') + with acc.client_cnx() as cnx: + rset = cnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + + def test_web_request(self): + acc = self.new_access('admin') + with acc.web_request(elephant='babar') as req: + rset = req.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + self.assertEqual('babar', req.form['elephant']) + + def test_close(self): + acc = self.new_access('admin') + acc.close() + + def test_admin_access(self): + with self.admin_access.client_cnx() as cnx: + self.assertEqual('admin', cnx.user.login) + + if __name__ == '__main__': unittest_main()
--- a/devtools/testlib.py Fri Jan 10 18:31:07 2014 +0100 +++ b/devtools/testlib.py Mon Jan 13 13:47:47 2014 +0100 @@ -39,12 +39,13 @@ from logilab.common.deprecation import deprecated, class_deprecated from logilab.common.shellutils import getlogin -from cubicweb import ValidationError, NoSelectableObject -from cubicweb import cwconfig, dbapi, devtools, web, server +from cubicweb import ValidationError, NoSelectableObject, AuthenticationError +from cubicweb import cwconfig, dbapi, devtools, web, server, repoapi from cubicweb.utils import json from cubicweb.sobjects import notification from cubicweb.web import Redirect, application from cubicweb.server.hook import SendMailOp +from cubicweb.server.session import Session from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS from cubicweb.devtools import fake, htmlparser, DEFAULT_EMPTY_DB_ID from cubicweb.utils import json @@ -147,15 +148,85 @@ return getattr(self.cnx, attrname) def __enter__(self): - return self.cnx.__enter__() + # already open + return self.cnx def __exit__(self, exctype, exc, tb): try: return self.cnx.__exit__(exctype, exc, tb) finally: - self.cnx.close() self.testcase.restore_connection() +# Repoaccess utility ###############################################3########### + +class RepoAccess(object): + """An helper to easily create object to access the repo as a specific user + + Each RepoAccess have it own session. + + A repo access can create three type of object: + + .. automethod:: cubicweb.testlib.RepoAccess.repo_cnx + .. automethod:: cubicweb.testlib.RepoAccess.client_cnx + .. automethod:: cubicweb.testlib.RepoAccess.web_request + + The RepoAccess need to be closed to destroy the associated Session. + TestCase usually take care of this aspect for the user. + + .. 
automethod:: cubicweb.testlib.RepoAccess.close + """ + + def __init__(self, repo, login, requestcls): + self._repo = repo + self._login = login + self.requestcls = requestcls + # opening session + # + # XXX this very hackish code should be cleaned and move on repo. + with repo.internal_cnx() as cnx: + rset = cnx.execute('CWUser U WHERE U login %(u)s', {'u': login}) + user = rset.get_entity(0, 0) + user.groups + user.properties + self._session = Session(user, repo) + repo._sessions[self._session.id] = self._session + self._session.user._cw = self._session + + @ contextmanager + def repo_cnx(self): + """Context manager returning a server side connection for the user""" + with self._session.new_cnx() as cnx: + yield cnx + + @ contextmanager + def client_cnx(self): + """Context manager returning a client side connection for the user""" + with repoapi.ClientConnection(self._session) as cnx: + yield cnx + + @ contextmanager + def web_request(self, url=None, headers={}, **kwargs): + """Context manager returning a web request pre-linked to a client cnx + + To commit and rollback use:: + + req.cnx.commit() + req.cnx.rolback() + """ + req = self.requestcls(self._repo.vreg, url=url, headers=headers, form=kwargs) + clt_cnx = repoapi.ClientConnection(self._session) + req.set_cnx(clt_cnx) + with clt_cnx: + yield req + + def close(self): + """Close the session associated to the RepoAccess""" + if self._session is not None: + self._repo.close(self._session.id) + self._session = None + + + # base class for cubicweb tests requiring a full cw environments ############### class CubicWebTC(TestCase): @@ -176,21 +247,195 @@ """ appid = 'data' configcls = devtools.ApptestConfiguration - reset_schema = reset_vreg = False # reset schema / vreg between tests tags = TestCase.tags | Tags('cubicweb', 'cw_repo') test_db_id = DEFAULT_EMPTY_DB_ID _cnxs = set() # establised connection - _cnx = None # current connection + # stay on connection for leak detection purpose + + def __init__(self, *args, 
**kwargs): + self._admin_session = None + self._admin_clt_cnx = None + self._current_session = None + self._current_clt_cnx = None + self.repo = None + self._open_access = set() + super(CubicWebTC, self).__init__(*args, **kwargs) + + # repository connection handling ########################################### + def new_access(self, login): + """provide a new RepoAccess object for a given user + + The access is automatically closed at the end of the test.""" + access = RepoAccess(self.repo, login, self.requestcls) + self._open_access.add(access) + return access + + def _close_access(self): + while self._open_access: + self._open_access.pop().close() + + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def set_cnx(self, cnx): + """""" + # XXX we want to deprecate this + assert getattr(cnx, '_session', None) is not None + if cnx is self._admin_clt_cnx: + self._pop_custom_cnx() + else: + self._cnxs.add(cnx) # register the cns to make sure it is removed + self._current_session = cnx._session + self._current_clt_cnx = cnx - # Too much complicated stuff. 
the class doesn't need to bear the repo anymore - @classmethod - def set_cnx(cls, cnx): - cls._cnxs.add(cnx) - cls._cnx = cnx + @property + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def cnx(self): + # XXX we want to deprecate this + clt_cnx = self._current_clt_cnx + if clt_cnx is None: + clt_cnx = self._admin_clt_cnx + return clt_cnx + + def _close_cnx(self): + """ensure that all cnx used by a test have been closed""" + for cnx in list(self._cnxs): + if cnx._open and not cnx._session.closed: + cnx.rollback() + cnx.close() + self._cnxs.remove(cnx) + + @property + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def session(self): + """return current server side session""" + # XXX We want to use a srv_connection instead and deprecate this + # property + session = self._current_session + if session is None: + session = self._admin_session + # bypassing all sanity to use the same repo cnx in the session we + # can't call set_cnx as the Connection is not managed by the + # session. 
+ session._Session__threaddata.cnx = self._admin_clt_cnx._cnx + session.set_cnxset() + return session + + @property + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def websession(self): + return self.session @property - def cnx(self): - return self.__class__._cnx + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def adminsession(self): + """return current server side session (using default manager account)""" + return self._admin_session + + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def login(self, login, **kwargs): + """return a connection for the given login/password""" + __ = kwargs.pop('autoclose', True) # not used anymore + if login == self.admlogin: + # definitly don't want autoclose when used as a context manager + clt_cnx = repoapi.ClientConnection(self._admin_session) + else: + if not kwargs: + kwargs['password'] = str(login) + clt_cnx = repoapi.connect(self.repo, login, **kwargs) + self.set_cnx(clt_cnx) + clt_cnx.__enter__() + return TestCaseConnectionProxy(self, clt_cnx) + + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def restore_connection(self): + self._pop_custom_cnx() + + def _pop_custom_cnx(self): + if self._current_clt_cnx is not None: + if self._current_clt_cnx._open: + self._current_clt_cnx.close() + if not self._current_session.closed: + self.repo.close(self._current_session.id) + self._current_clt_cnx = None + self._current_session = None + + #XXX this doesn't need to a be classmethod anymore + def _init_repo(self): + """init the repository and connection to it. + """ + # setup configuration for test + self.init_config(self.config) + # get or restore and working db. 
+ db_handler = devtools.get_test_db_handler(self.config) + db_handler.build_db_cache(self.test_db_id, self.pre_setup_database) + + db_handler.restore_database(self.test_db_id) + self.repo = db_handler.get_repo(startup=True) + # get an admin session (without actual login) + sources = db_handler.config.sources() + login = unicode(sources['admin']['login']) + self.admin_access = self.new_access(login) + self._admin_session = self.admin_access._session + self._admin_clt_cnx = repoapi.ClientConnection(self._admin_session) + self._cnxs.add(self._admin_clt_cnx) + self._admin_clt_cnx.__enter__() + self.config.repository = lambda x=None: self.repo + + # db api ################################################################## + + @nocoverage + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def cursor(self, req=None): + if req is not None: + return req.cnx + else: + return self.cnx + + @nocoverage + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def execute(self, rql, args=None, req=None): + """executes <rql>, builds a resultset, and returns a couple (rset, req) + where req is a FakeRequest + """ + req = req or self.request(rql=rql) + return req.execute(unicode(rql), args) + + @nocoverage + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def commit(self): + try: + return self.cnx.commit() + finally: + self.session.set_cnxset() # ensure cnxset still set after commit + + @nocoverage + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def rollback(self): + try: + self.cnx.rollback() + except dbapi.ProgrammingError: + pass # connection closed + finally: + self.session.set_cnxset() # ensure cnxset still set after commit + + requestcls = fake.FakeRequest + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def request(self, rollbackfirst=False, url=None, headers={}, **kwargs): + """return a web ui request""" + if rollbackfirst: + self.cnx.rollback() + req = 
self.requestcls(self.vreg, url=url, headers=headers, form=kwargs) + req.set_cnx(self.cnx) + return req + + # server side db api ####################################################### + + @deprecated('[4.0] explicitly use RepoAccess object in test instead') + def sexecute(self, rql, args=None): + self.session.set_cnxset() + return self.session.execute(rql, args) + + + # config management ######################################################## @classproperty def config(cls): @@ -238,32 +483,10 @@ except Exception: # not in server only configuration pass - #XXX this doesn't need to a be classmethod anymore - @classmethod - def _init_repo(cls): - """init the repository and connection to it. - """ - # setup configuration for test - cls.init_config(cls.config) - # get or restore and working db. - db_handler = devtools.get_test_db_handler(cls.config) - db_handler.build_db_cache(cls.test_db_id, cls.pre_setup_database) + @property + def vreg(self): + return self.repo.vreg - cls.repo, cnx = db_handler.get_repo_and_cnx(cls.test_db_id) - # no direct assignation to cls.cnx anymore. - # cnx is now an instance property that use a class protected attributes. 
- cls.set_cnx(cnx) - cls.vreg = cls.repo.vreg - cls.websession = dbapi.DBAPISession(cnx, cls.admlogin) - cls._orig_cnx = (cnx, cls.websession) - cls.config.repository = lambda x=None: cls.repo - - def _close_cnx(self): - for cnx in list(self._cnxs): - if not cnx._closed: - cnx.rollback() - cnx.close() - self._cnxs.remove(cnx) # global resources accessors ############################################### @@ -272,18 +495,6 @@ """return the application schema""" return self.vreg.schema - @property - def session(self): - """return current server side session (using default manager account)""" - session = self.repo._sessions[self.cnx.sessionid] - session.set_cnxset() - return session - - @property - def adminsession(self): - """return current server side session (using default manager account)""" - return self.repo._sessions[self._orig_cnx[0].sessionid] - def shell(self): """return a shell session object""" from cubicweb.server.migractions import ServerMigrationHelper @@ -323,6 +534,14 @@ def tearDown(self): # XXX hack until logilab.common.testlib is fixed + if self._admin_clt_cnx is not None: + if self._admin_clt_cnx._open: + self._admin_clt_cnx.close() + self._admin_clt_cnx = None + if self._admin_session is not None: + if not self._admin_session.closed: + self.repo.close(self._admin_session.id) + self._admin_session = None while self._cleanups: cleanup, args, kwargs = self._cleanups.pop(-1) cleanup(*args, **kwargs) @@ -351,8 +570,7 @@ def user(self, req=None): """return the application schema""" if req is None: - req = self.request() - return self.cnx.user(req) + return self.request().user else: return req.user @@ -389,65 +607,6 @@ req.cnx.commit() return user - def login(self, login, **kwargs): - """return a connection for the given login/password""" - if login == self.admlogin: - self.restore_connection() - # definitly don't want autoclose when used as a context manager - return self.cnx - autoclose = kwargs.pop('autoclose', True) - if not kwargs: - 
kwargs['password'] = str(login) - self.set_cnx(dbapi._repo_connect(self.repo, unicode(login), **kwargs)) - self.websession = dbapi.DBAPISession(self.cnx) - if login == self.vreg.config.anonymous_user()[0]: - self.cnx.anonymous_connection = True - if autoclose: - return TestCaseConnectionProxy(self, self.cnx) - return self.cnx - - def restore_connection(self): - if not self.cnx is self._orig_cnx[0]: - if not self.cnx._closed: - self.cnx.close() - cnx, self.websession = self._orig_cnx - self.set_cnx(cnx) - - # db api ################################################################## - - @nocoverage - def cursor(self, req=None): - return self.cnx.cursor(req or self.request()) - - @nocoverage - def execute(self, rql, args=None, req=None): - """executes <rql>, builds a resultset, and returns a couple (rset, req) - where req is a FakeRequest - """ - req = req or self.request(rql=rql) - return req.execute(unicode(rql), args) - - @nocoverage - def commit(self): - try: - return self.cnx.commit() - finally: - self.session.set_cnxset() # ensure cnxset still set after commit - - @nocoverage - def rollback(self): - try: - self.cnx.rollback() - except dbapi.ProgrammingError: - pass # connection closed - finally: - self.session.set_cnxset() # ensure cnxset still set after commit - - # server side db api ####################################################### - - def sexecute(self, rql, args=None): - self.session.set_cnxset() - return self.session.execute(rql, args) # other utilities ######################################################### @@ -630,21 +789,12 @@ @cached def app(self): """return a cubicweb publisher""" - publisher = application.CubicWebPublisher(self.config, vreg=self.vreg) + publisher = application.CubicWebPublisher(self.repo, self.config) def raise_error_handler(*args, **kwargs): raise publisher.error_handler = raise_error_handler return publisher - requestcls = fake.FakeRequest - def request(self, rollbackfirst=False, url=None, headers={}, **kwargs): - """return 
a web ui request""" - req = self.requestcls(self.vreg, url=url, headers=headers, form=kwargs) - if rollbackfirst: - self.websession.cnx.rollback() - req.set_session(self.websession) - return req - def remote_call(self, fname, *args): """remote json call simulation""" dump = json.dumps @@ -765,33 +915,29 @@ def init_authentication(self, authmode, anonuser=None): self.set_auth_mode(authmode, anonuser) - req = self.request(url='login') - origsession = req.session - req.session = req.cnx = None - del req.execute # get back to class implementation + req = self.requestcls(self.vreg, url='login') sh = self.app.session_handler authm = sh.session_manager.authmanager authm.anoninfo = self.vreg.config.anonymous_user() authm.anoninfo = authm.anoninfo[0], {'password': authm.anoninfo[1]} # not properly cleaned between tests self.open_sessions = sh.session_manager._sessions = {} - return req, origsession + return req, self.websession def assertAuthSuccess(self, req, origsession, nbsessions=1): sh = self.app.session_handler - self.app.connect(req) - session = req.session + session = self.app.get_session(req) + clt_cnx = repoapi.ClientConnection(session) + req.set_cnx(clt_cnx) self.assertEqual(len(self.open_sessions), nbsessions, self.open_sessions) self.assertEqual(session.login, origsession.login) self.assertEqual(session.anonymous_session, False) def assertAuthFailure(self, req, nbsessions=0): - self.app.connect(req) - self.assertIsInstance(req.session, dbapi.DBAPISession) - self.assertEqual(req.session.cnx, None) - self.assertIsInstance(req.cnx, (dbapi._NeedAuthAccessMock, NoneType)) - # + 1 since we should still have session without connection set - self.assertEqual(len(self.open_sessions), nbsessions + 1) + with self.assertRaises(AuthenticationError): + self.app.get_session(req) + # +0 since we do not track the opened session + self.assertEqual(len(self.open_sessions), nbsessions) clear_cache(req, 'get_authorization') # content validation 
#######################################################
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/4.0.rst Mon Jan 13 13:47:47 2014 +0100 @@ -0,0 +1,142 @@ +What's new in CubicWeb 4.0? +============================ + +Behavior Changes +---------------- + +* The anonymous property of Session and Connection is now computed from the + related user login. If it matches the ``anonymous-user`` in the config the + connection is anonymous. Beware that the ``anonymous-user`` config is web + specific. Therefore, no session may be anonymous in a repository-only setup. + +New Repository Access API +------------------------- + +Connection replaces Session +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A new explicit Connection object replaces Session as the main repository entry +point. Connection holds all the necessary methods to be used server side +(``execute``, ``commit``, ``rollback``, ``call_service``, ``entity_from_eid``, +etc…). You obtain a new Connection object using ``session.new_cnx()``. +Connection objects need an explicit begin and end. Use them as a context +manager:: + + with session.new_cnx() as cnx: + self.execute('INSERT Elephant E, E name "Cabar"') + self.commit() + self.execute('INSERT Elephant E, E name "Celeste"') + self.commit() + # Once you get out of the "with" clause, the connection is closed. + +Using the same Connection object in multiple threads will give you access to the +same Transaction. However, Connection objects are not thread safe. + +``repository.internal_session`` is deprecated in favor of +``repository.internal_cnx``. Note that internal connections are now safe. +Integrity hooks are enabled. + +Backward compat is preserved on Session. They can still be used to access the +database for the next few versions. + +dbapi vs repoapi +~~~~~~~~~~~~~~~~ + +A new API has been introduced to replace the dbapi. It is called "repoapi". 
+ +There are three relevant functions for now: + +``repoapi.get_repository(config)`` takes a config object and returns a repository + +``repoapi.connect(repo, **credential)`` returns a ClientConnection associated to +the user identified by the credential. The ClientConnection is associated to its +own Session that is closed when the ClientConnection is closed. A +ClientConnection is a Connection-like object to be used client side. + +``repoapi.anonymous_cnx(repo)`` returns a ClientConnection associated to the +anonymous user if described in the config. + +repoapi.ClientConnection replaces dbapi.Connection and company +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +On the client/web side, the Request is now using a ``repoapi.ClientConnection`` +instead of a ``dbapi.connection``. The ``ClientConnection`` has multiple backward +compat methods to look like a ``dbapi.Cursor`` and ``dbapi.Connection``. It will +remain that way for a few versions. + +Sessions used on the web side are now the same as the ones used server side. +Some backward compat methods have been installed on the server side Session to +ease the transition. + +The authentication stack has been altered to use the ``repoapi`` instead of +the ``dbapi``. Cubes adding new elements in this stack are likely to break. + +New API in test +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +All current methods and attributes used to access the repo on ``CubicWebTC`` are +deprecated. You can now use a ``RepoAccess`` object. A ``RepoAccess`` object is +linked to a new ``Session`` for a specified user. 
It is able to create +``Connection``, ``ClientConnection`` and web side requests linked to this +session:: + + access = self.new_access("babar") # create a new RepoAccess for user babar + with access.repo_cnx() as cnx: + # some work with server side cnx + cnx.execute(…) + cnx.commit() + cnx.execute(…) + cnx.commit() + + with access.client_cnx() as cnx: + # some work with client side cnx + cnx.execute(…) + cnx.commit() + + with access.web_request(elephant="babar") as req: + # some work with the web request + elephant_name = req.form["elephant"] + req.execute(…) + req.cnx.commit() + +By default ``testcase.admin_access`` contains a ``RepoAccess`` object for the +default admin session. + + +API changes +----------- + +* ``RepositorySessionManager.postlogin`` is now called with two arguments, + request and session. And this now happens before the session is linked to the + request. + +* ``SessionManager`` and ``AuthenticationManager`` now take a repo object at + initialization time instead of a vreg. + +* The ``async`` argument of ``_cw.call_service`` has been dropped. All calls are + now synchronous. The zmq notification bus looks like a good replacement for + most async use cases. + +* ``repo.stats()`` is now deprecated. The same information is available through + a service (``_cw.call_service('repo_stats')``) + +* ``repo.gc_stats()`` is now deprecated. The same information is available through + a service (``_cw.call_service('repo_gc_stats')``) + +* ``request.set_session`` no longer takes an optional ``user`` argument. + +* CubicWebTC does not have repo and cnx as class attributes anymore. They are + standard instance attributes. ``set_cnx`` and ``_init_repo`` class methods + become instance methods. + +* ``set_cnxset`` and ``free_cnxset`` are deprecated. cnxsets are now + automatically managed. + + +Deprecated Code Drops +---------------------- + +* The ldapuser source has been dropped. ldapfeed is the only official source + remaining for ldap. 
+ +* session.hijack_user mechanism has been dropped.
--- a/entities/authobjs.py Fri Jan 10 18:31:07 2014 +0100 +++ b/entities/authobjs.py Mon Jan 13 13:47:47 2014 +0100 @@ -166,6 +166,17 @@ dc_long_title = name + def __call__(self, *args, **kwargs): + """ugly hack for compatibility betweeb dbapi and repo api + + In the dbapi, Connection and Session have a ``user`` method to + generated a user for a request In the repo api, Connection and Session + have a user attribute inherited from SessionRequestBase prototype. This + ugly hack allows to not break user of the user method. + + XXX Deprecate me ASAP""" + return self + from logilab.common.deprecation import class_renamed EUser = class_renamed('EUser', CWUser) EGroup = class_renamed('EGroup', CWGroup)
--- a/etwist/server.py Fri Jan 10 18:31:07 2014 +0100 +++ b/etwist/server.py Mon Jan 13 13:47:47 2014 +0100 @@ -57,12 +57,12 @@ class CubicWebRootResource(resource.Resource): - def __init__(self, config, vreg=None): + def __init__(self, config, repo): resource.Resource.__init__(self) self.config = config # instantiate publisher here and not in init_publisher to get some # checks done before daemonization (eg versions consistency) - self.appli = CubicWebPublisher(config, vreg=vreg) + self.appli = CubicWebPublisher(repo, config) self.base_url = config['base-url'] self.https_url = config['https-url'] global MAX_POST_LENGTH @@ -270,12 +270,20 @@ LOGGER = getLogger('cubicweb.twisted') set_log_methods(CubicWebRootResource, LOGGER) -def run(config, vreg=None, debug=None): +def run(config, debug=None, repo=None): + # repo may by passed during test. + # + # Test has already created a repo object so we should not create a new one. + # Explicitly passing the repo object avoid relying on the fragile + # config.repository() cache. We could imagine making repo a mandatory + # argument and receives it from the starting command directly. if debug is not None: config.debugmode = debug config.check_writeable_uid_directory(config.appdatahome) # create the site - root_resource = CubicWebRootResource(config, vreg=vreg) + if repo is None: + repo = config.repository() + root_resource = CubicWebRootResource(config, repo) website = server.Site(root_resource) # serve it via standard HTTP on port set in the configuration port = config['port'] or 8080
--- a/hooks/syncschema.py Fri Jan 10 18:31:07 2014 +0100 +++ b/hooks/syncschema.py Mon Jan 13 13:47:47 2014 +0100 @@ -196,7 +196,7 @@ clear_cache(eschema, 'ordered_relations') def postcommit_event(self): - rebuildinfered = self.session.data.get('rebuild-infered', True) + rebuildinfered = self.session.get_shared_data('rebuild-infered', True) repo = self.session.repo # commit event should not raise error, while set_schema has chances to # do so because it triggers full vreg reloading
--- a/hooks/test/unittest_syncschema.py Fri Jan 10 18:31:07 2014 +0100 +++ b/hooks/test/unittest_syncschema.py Mon Jan 13 13:47:47 2014 +0100 @@ -30,7 +30,6 @@ del SchemaModificationHooksTC.schema_eids class SchemaModificationHooksTC(CubicWebTC): - reset_schema = True def setUp(self): super(SchemaModificationHooksTC, self).setUp()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/repoapi.py Mon Jan 13 13:47:47 2014 +0100 @@ -0,0 +1,357 @@ +# copyright 2013-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""Official API to access the content of a repository +""" + +from logilab.common.deprecation import deprecated + +from cubicweb.utils import parse_repo_uri +from cubicweb import ConnectionError, ProgrammingError, AuthenticationError +from uuid import uuid4 +from contextlib import contextmanager +from cubicweb.req import RequestSessionBase +from functools import wraps + +### private function for specific method ############################ + +def _get_inmemory_repo(config, vreg=None): + from cubicweb.server.repository import Repository + from cubicweb.server.utils import TasksManager + return Repository(config, TasksManager(), vreg=vreg) + + +### public API ###################################################### + +def get_repository(uri=None, config=None, vreg=None): + """get a repository for the given URI or config/vregistry (in case we're + loading the repository for a client, eg web server, configuration). 
+ + The returned repository may be an in-memory repository or a proxy object + using a specific RPC method, depending on the given URI (pyro or zmq). + """ + if uri is None: + return _get_inmemory_repo(config, vreg) + + protocol, hostport, appid = parse_repo_uri(uri) + + if protocol == 'inmemory': + # me may have been called with a dummy 'inmemory://' uri ... + return _get_inmemory_repo(config, vreg) + + if protocol == 'pyroloc': # direct connection to the instance + from logilab.common.pyro_ext import get_proxy + uri = uri.replace('pyroloc', 'PYRO') + return get_proxy(uri) + + if protocol == 'pyro': # connection mediated through the pyro ns + from logilab.common.pyro_ext import ns_get_proxy + path = appid.strip('/') + if not path: + raise ConnectionError( + "can't find instance name in %s (expected to be the path component)" + % uri) + if '.' in path: + nsgroup, nsid = path.rsplit('.', 1) + else: + nsgroup = 'cubicweb' + nsid = path + return ns_get_proxy(nsid, defaultnsgroup=nsgroup, nshost=hostport) + + if protocol.startswith('zmqpickle-'): + from cubicweb.zmqclient import ZMQRepositoryClient + return ZMQRepositoryClient(uri) + else: + raise ConnectionError('unknown protocol: `%s`' % protocol) + +def connect(repo, login, **kwargs): + """Take credential and return associated ClientConnection. + + The ClientConnection is associated to a new Session object that will be + closed when the ClientConnection is closed. + + raise AuthenticationError if the credential are invalid.""" + sessionid = repo.connect(login, **kwargs) + session = repo._get_session(sessionid) + # XXX the autoclose_session should probably be handle on the session directly + # this is something to consider once we have proper server side Connection. + return ClientConnection(session, autoclose_session=True) + +def anonymous_cnx(repo): + """return a ClientConnection for Anonymous user. 
+ + The ClientConnection is associated to a new Session object that will be + closed when the ClientConnection is closed. + + raises an AuthenticationError if anonymous usage is not allowed + """ + anoninfo = getattr(repo.config, 'anonymous_user', lambda: None)() + if anoninfo is None: # no anonymous user + raise AuthenticationError('anonymous access is not authorized') + anon_login, anon_password = anoninfo + # use vreg's repository cache + return connect(repo, anon_login, password=anon_password) + +def _srv_cnx_func(name): + """Decorate ClientConnection method blindly forward to Connection + THIS TRANSITIONAL PURPOSE + + will be dropped when we have standalone connection""" + def proxy(clt_cnx, *args, **kwargs): + # the ``with`` dance is transitional. We do not have Standalone + # Connection yet so we use this trick to unsure the session have the + # proper cnx loaded. This can be simplified one we have Standalone + # Connection object + if not clt_cnx._open: + raise ProgrammingError('Closed client connection') + return getattr(clt_cnx._cnx, name)(*args, **kwargs) + return proxy + +def _open_only(func): + """decorator for ClientConnection method that check it is open""" + @wraps(func) + def check_open(clt_cnx, *args, **kwargs): + if not clt_cnx._open: + raise ProgrammingError('Closed client connection') + return func(clt_cnx, *args, **kwargs) + return check_open + + +class ClientConnection(RequestSessionBase): + """A Connection object to be used Client side. + + This object is aimed to be used client side (so potential communication + with the repo through RTC) and aims to offer some compatibility with the + cubicweb.dbapi.Connection interface. + + The autoclose_session paramenter informs the connection that this session + have been open explictly and only for this client connection. The + connection will close the session of exit. 
+ """ + # make exceptions available through the connection object + ProgrammingError = ProgrammingError + # attributes that may be overriden per connection instance + anonymous_connection = False # XXX really needed ? + is_repo_in_memory = True # BC, always true + + def __init__(self, session, autoclose_session=False): + self._session = session # XXX there is no real reason to keep the + # session around function still using it should + # be rewritten and migrated. + self._cnx = None + self._open = None + self._web_request = False + self.vreg = session.vreg + self._set_user(session.user) + self._autoclose_session = autoclose_session + + def __enter__(self): + assert self._open is None + self._open = True + self._cnx = self._session.new_cnx() + self._cnx.__enter__() + self._cnx.ctx_count += 1 + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self._open = False + self._cnx.ctx_count -= 1 + self._cnx.__exit__(exc_type, exc_val, exc_tb) + self._cnx = None + if self._autoclose_session: + # we have to call repo.close to unsure the repo properly forget the + # session calling session.close() is not enought :-( + self._session.repo.close(self._session.id) + + + # begin silly BC + @property + def _closed(self): + return not self._open + + def close(self): + if self._open: + self.__exit__(None, None, None) + + def __repr__(self): + # XXX we probably want to reference the user of the session here + if self._open is None: + return '<ClientConnection (not open yet)>' + elif not self._open: + return '<ClientConnection (closed)>' + elif self.anonymous_connection: + return '<ClientConnection %s (anonymous)>' % self._cnx.connectionid + else: + return '<ClientConnection %s>' % self._cnx.connectionid + # end silly BC + + # Main Connection purpose in life ######################################### + + call_service = _srv_cnx_func('call_service') + + @_open_only + def execute(self, *args, **kwargs): + # the ``with`` dance is transitional. 
We do not have Standalone + # Connection yet so we use this trick to unsure the session have the + # proper cnx loaded. This can be simplified one we have Standalone + # Connection object + rset = self._cnx.execute(*args, **kwargs) + rset.req = self + # XXX keep the same behavior as the old dbapi + # otherwise multiple tests break. + # The little internet kitten is very sad about this situation. + rset._rqlst = None + return rset + + commit = _srv_cnx_func('commit') + rollback = _srv_cnx_func('rollback') + + # session data methods ##################################################### + + get_shared_data = _srv_cnx_func('get_shared_data') + set_shared_data = _srv_cnx_func('set_shared_data') + + # meta-data accessors ###################################################### + + @_open_only + def source_defs(self): + """Return the definition of sources used by the repository.""" + return self._session.repo.source_defs() + + @_open_only + def get_schema(self): + """Return the schema currently used by the repository.""" + return self._session.repo.source_defs() + + @_open_only + def get_option_value(self, option, foreid=None): + """Return the value for `option` in the configuration. If `foreid` is + specified, the actual repository to which this entity belongs is + dereferenced and the option value retrieved from it. + """ + return self._session.repo.get_option_value(option, foreid) + + describe = _srv_cnx_func('describe') + + # undo support ############################################################ + + @_open_only + def undoable_transactions(self, ueid=None, req=None, **actionfilters): + """Return a list of undoable transaction objects by the connection's + user, ordered by descendant transaction time. + + Managers may filter according to user (eid) who has done the transaction + using the `ueid` argument. Others will only see their own transactions. 
+ + Additional filtering capabilities is provided by using the following + named arguments: + + * `etype` to get only transactions creating/updating/deleting entities + of the given type + + * `eid` to get only transactions applied to entity of the given eid + + * `action` to get only transactions doing the given action (action in + 'C', 'U', 'D', 'A', 'R'). If `etype`, action can only be 'C', 'U' or + 'D'. + + * `public`: when additional filtering is provided, their are by default + only searched in 'public' actions, unless a `public` argument is given + and set to false. + """ + # the ``with`` dance is transitional. We do not have Standalone + # Connection yet so we use this trick to unsure the session have the + # proper cnx loaded. This can be simplified one we have Standalone + # Connection object + source = self._cnx.repo.system_source + txinfos = source.undoable_transactions(self._cnx, ueid, **actionfilters) + for txinfo in txinfos: + txinfo.req = req or self # XXX mostly wrong + return txinfos + + @_open_only + def transaction_info(self, txuuid, req=None): + """Return transaction object for the given uid. + + raise `NoSuchTransaction` if not found or if session's user is not + allowed (eg not in managers group and the transaction doesn't belong to + him). + """ + # the ``with`` dance is transitional. We do not have Standalone + # Connection yet so we use this trick to unsure the session have the + # proper cnx loaded. This can be simplified one we have Standalone + # Connection object + txinfo = self._cnx.repo.system_source.tx_info(self._cnx, txuuid) + if req: + txinfo.req = req + else: + txinfo.cnx = self + return txinfo + + @_open_only + def transaction_actions(self, txuuid, public=True): + """Return an ordered list of action effectued during that transaction. + + If public is true, return only 'public' actions, eg not ones triggered + under the cover by hooks, else return all actions. 
+ + raise `NoSuchTransaction` if the transaction is not found or if + session's user is not allowed (eg not in managers group and the + transaction doesn't belong to him). + """ + # the ``with`` dance is transitional. We do not have Standalone + # Connection yet so we use this trick to unsure the session have the + # proper cnx loaded. This can be simplified one we have Standalone + # Connection object + return self._cnx.repo.system_source.tx_actions(self._cnx, txuuid, public) + + @_open_only + def undo_transaction(self, txuuid): + """Undo the given transaction. Return potential restoration errors. + + raise `NoSuchTransaction` if not found or if session's user is not + allowed (eg not in managers group and the transaction doesn't belong to + him). + """ + # the ``with`` dance is transitional. We do not have Standalone + # Connection yet so we use this trick to unsure the session have the + # proper cnx loaded. This can be simplified one we have Standalone + # Connection object + return self._cnx.repo.system_source.undo_transaction(self._cnx, txuuid) + + @deprecated('[4.0] This is a repoapi.ClientConnection object not a dbapi one') + def request(self): + return self + + @deprecated('[4.0] This is a repoapi.ClientConnection object not a dbapi one') + def cursor(self): + return self + + @ property + @deprecated('[4.0] This is a repoapi.ClientConnection object not a dbapi one') + def sessionid(self): + return self._session.id + + @property + @deprecated('[4.0] This is a repoapi.ClientConnection object not a dbapi one') + def connection(self): + return self + + @property + @deprecated('[4.0] This is a repoapi.ClientConnection object not a dbapi one') + def _repo(self): + return self._session.repo
--- a/req.py Fri Jan 10 18:31:07 2014 +0100 +++ b/req.py Mon Jan 13 13:47:47 2014 +0100 @@ -75,6 +75,23 @@ self.local_perm_cache = {} self._ = unicode + def _set_user(self, orig_user): + """set the user for this req_session_base + + A special method is needed to ensure the linked user is linked to the + connection too. + """ + # cnx validity is checked by the call to .user_info + rset = self.eid_rset(orig_user.eid, 'CWUser') + user_cls = self.vreg['etypes'].etype_class('CWUser') + user = user_cls(self, rset, row=0, groups=orig_user.groups, + properties=orig_user.properties) + user.cw_attr_cache['login'] = orig_user.login # cache login + self.user = user + self.set_entity_cache(user) + self.set_language(user.prefered_language()) + + def set_language(self, lang): """install i18n configuration for `lang` translation. @@ -253,24 +270,20 @@ """ # use *args since we don't want first argument to be "anonymous" to # avoid potential clash with kwargs + method = None if args: assert len(args) == 1, 'only 0 or 1 non-named-argument expected' method = args[0] - else: - method = None + if method is None: + method = 'view' # XXX I (adim) think that if method is passed explicitly, we should # not try to process it and directly call req.build_url() - if method is None: - if self.from_controller() == 'view' and not '_restpath' in kwargs: - method = self.relative_path(includeparams=False) or 'view' - else: - method = 'view' base_url = kwargs.pop('base_url', None) if base_url is None: secure = kwargs.pop('__secure__', None) base_url = self.base_url(secure=secure) if '_restpath' in kwargs: - assert method == 'view', method + assert method == 'view', repr(method) path = kwargs.pop('_restpath') else: path = method
--- a/server/__init__.py Fri Jan 10 18:31:07 2014 +0100 +++ b/server/__init__.py Mon Jan 13 13:47:47 2014 +0100 @@ -31,8 +31,6 @@ from logilab.common.modutils import LazyObject from logilab.common.textutils import splitstrip from logilab.common.registry import yes -from logilab import database - from yams import BASE_GROUPS from cubicweb import CW_SOFTWARE_ROOT
--- a/server/migractions.py Fri Jan 10 18:31:07 2014 +0100 +++ b/server/migractions.py Mon Jan 13 13:47:47 2014 +0100 @@ -102,7 +102,7 @@ # no config on shell to a remote instance if config is not None and (cnx or connect): repo = self.repo - self.session.data['rebuild-infered'] = False + self.session.set_shared_data('rebuild-infered', False) # register a hook to clear our group_mapping cache and the # self._synchronized set when some group is added or updated ClearGroupMap.mih = self @@ -292,7 +292,7 @@ print 'aborting...' sys.exit(0) self.session.keep_cnxset_mode('transaction') - self.session.data['rebuild-infered'] = False + self.session.set_shared_data('rebuild-infered', False) return self._cnx @property
--- a/server/repository.py Fri Jan 10 18:31:07 2014 +0100 +++ b/server/repository.py Mon Jan 13 13:47:47 2014 +0100 @@ -36,9 +36,11 @@ from os.path import join from datetime import datetime from time import time, localtime, strftime +from contextlib import contextmanager from warnings import warn from logilab.common.decorators import cached, clear_cache +from logilab.common.deprecation import deprecated from logilab.common.compat import any from logilab.common import flatten @@ -498,51 +500,35 @@ def _build_user(self, session, eid): """return a CWUser entity for user with the given eid""" - cls = self.vreg['etypes'].etype_class('CWUser') - st = cls.fetch_rqlst(session.user, ordermethod=None) - st.add_eid_restriction(st.get_variable('X'), 'x', 'Substitute') - rset = session.execute(st.as_string(), {'x': eid}) - assert len(rset) == 1, rset - cwuser = rset.get_entity(0, 0) - # pylint: disable=W0104 - # prefetch / cache cwuser's groups and properties. This is especially - # useful for internal sessions to avoid security insertions - cwuser.groups - cwuser.properties - return cwuser + with session.ensure_cnx_set: + cls = self.vreg['etypes'].etype_class('CWUser') + st = cls.fetch_rqlst(session.user, ordermethod=None) + st.add_eid_restriction(st.get_variable('X'), 'x', 'Substitute') + rset = session.execute(st.as_string(), {'x': eid}) + assert len(rset) == 1, rset + cwuser = rset.get_entity(0, 0) + # pylint: disable=W0104 + # prefetch / cache cwuser's groups and properties. This is especially + # useful for internal sessions to avoid security insertions + cwuser.groups + cwuser.properties + return cwuser # public (dbapi) interface ################################################ + @deprecated("[4.0] use _cw.call_service('repo_stats'") def stats(self): # XXX restrict to managers session? """Return a dictionary containing some statistics about the repository resources usage. This is a public method, not requiring a session id. 
+ + This method is deprecated in favor of using _cw.call_service('repo_stats') """ - results = {} - querier = self.querier - source = self.system_source - for size, maxsize, hits, misses, title in ( - (len(querier._rql_cache), self.config['rql-cache-size'], - querier.cache_hit, querier.cache_miss, 'rqlt_st'), - (len(source._cache), self.config['rql-cache-size'], - source.cache_hit, source.cache_miss, 'sql'), - ): - results['%s_cache_size' % title] = '%s / %s' % (size, maxsize) - results['%s_cache_hit' % title] = hits - results['%s_cache_miss' % title] = misses - results['%s_cache_hit_percent' % title] = (hits * 100) / (hits + misses) - results['type_source_cache_size'] = len(self._type_source_cache) - results['extid_cache_size'] = len(self._extid_cache) - results['sql_no_cache'] = self.system_source.no_cache - results['nb_open_sessions'] = len(self._sessions) - results['nb_active_threads'] = threading.activeCount() - looping_tasks = self._tasks_manager._looping_tasks - results['looping_tasks'] = ', '.join(str(t) for t in looping_tasks) - results['available_cnxsets'] = self._cnxsets_pool.qsize() - results['threads'] = ', '.join(sorted(str(t) for t in threading.enumerate())) - return results + with self.internal_session() as session: + return session.call_service('repo_stats') + @deprecated("[4.0] use _cw.call_service('repo_gc_stats'") def gc_stats(self, nmax=20): """Return a dictionary containing some statistics about the repository memory usage. 
@@ -552,33 +538,8 @@ nmax is the max number of (most) referenced object returned as the 'referenced' result """ - - from cubicweb._gcdebug import gc_info - from cubicweb.appobject import AppObject - from cubicweb.rset import ResultSet - from cubicweb.dbapi import Connection, Cursor - from cubicweb.web.request import CubicWebRequestBase - from rql.stmts import Union - - lookupclasses = (AppObject, - Union, ResultSet, - Connection, Cursor, - CubicWebRequestBase) - try: - from cubicweb.server.session import Session, InternalSession - lookupclasses += (InternalSession, Session) - except ImportError: - pass # no server part installed - - results = {} - counters, ocounters, garbage = gc_info(lookupclasses, - viewreferrersclasses=()) - values = sorted(counters.iteritems(), key=lambda x: x[1], reverse=True) - results['lookupclasses'] = values - values = sorted(ocounters.iteritems(), key=lambda x: x[1], reverse=True)[:nmax] - results['referenced'] = values - results['unreachable'] = len(garbage) - return results + with self.internal_session() as session: + return session.call_service('repo_gc_stats', nmax=nmax) def get_schema(self): """Return the instance schema. 
@@ -848,7 +809,7 @@ self.debug('begin commit for session %s', sessionid) try: session = self._get_session(sessionid) - session.set_tx(txid) + session.set_cnx(txid) return session.commit() except (ValidationError, Unauthorized): raise @@ -861,7 +822,7 @@ self.debug('begin rollback for session %s', sessionid) try: session = self._get_session(sessionid) - session.set_tx(txid) + session.set_cnx(txid) session.rollback() except Exception: self.exception('unexpected error') @@ -883,33 +844,14 @@ del self._sessions[sessionid] self.info('closed session %s for user %s', sessionid, session.user.login) - def call_service(self, sessionid, regid, async, **kwargs): + def call_service(self, sessionid, regid, **kwargs): """ See :class:`cubicweb.dbapi.Connection.call_service` and :class:`cubicweb.server.Service` """ + # XXX lack a txid session = self._get_session(sessionid) - return self._call_service_with_session(session, regid, async, **kwargs) - - def _call_service_with_session(self, session, regid, async, **kwargs): - if async: - self.info('calling service %s asynchronously', regid) - def task(): - session.set_cnxset() - try: - service = session.vreg['services'].select(regid, session, **kwargs) - return service.call(**kwargs) - finally: - session.rollback() # free cnxset - self.threaded_task(task) - else: - self.info('calling service %s synchronously', regid) - session.set_cnxset() - try: - service = session.vreg['services'].select(regid, session, **kwargs) - return service.call(**kwargs) - finally: - session.free_cnxset() + return session._cnx.call_service(regid, **kwargs) def user_info(self, sessionid, props=None): """this method should be used by client to: @@ -997,11 +939,15 @@ nbclosed += 1 return nbclosed + @deprecated("[4.0] use internal_cnx now\n" + "(Beware that integrity hook are now enabled by default)") def internal_session(self, cnxprops=None, safe=False): """return a dbapi like connection/cursor using internal user which have every rights on the repository. 
The `safe` argument is a boolean flag telling if integrity hooks should be activated or not. + /!\ the safe argument is False by default. + *YOU HAVE TO* commit/rollback or close (rollback implicitly) the session once the job's done, else you'll leak connections set up to the time where no one is available, causing irremediable freeze... @@ -1010,6 +956,22 @@ session.set_cnxset() return session + @contextmanager + def internal_cnx(self): + """return a Connection using internal user which have + every rights on the repository. The `safe` argument is dropped. all + hook are enabled by default. + + /!\ IN OPPOSITE OF THE OLDER INTERNAL_SESSION, + /!\ INTERNAL CONNECTION HAVE ALL HOOKS ENABLED. + + This is to be used a context manager. + """ + with InternalSession(self) as session: + with session.new_cnx() as cnx: + yield cnx + + def _get_session(self, sessionid, setcnxset=False, txid=None, checkshuttingdown=True): """return the session associated with the given session identifier""" @@ -1020,7 +982,7 @@ except KeyError: raise BadConnectionId('No such session %s' % sessionid) if setcnxset: - session.set_tx(txid) # must be done before set_cnxset + session.set_cnx(txid) # must be done before set_cnxset session.set_cnxset() return session
--- a/server/session.py Fri Jan 10 18:31:07 2014 +0100 +++ b/server/session.py Mon Jan 13 13:47:47 2014 +0100 @@ -23,12 +23,15 @@ from time import time from uuid import uuid4 from warnings import warn +import json +import functools +from contextlib import contextmanager from logilab.common.deprecation import deprecated from logilab.common.textutils import unormalize from logilab.common.registry import objectify_predicate -from cubicweb import UnknownEid, QueryError, schema, server +from cubicweb import UnknownEid, QueryError, schema, server, ProgrammingError from cubicweb.req import RequestSessionBase from cubicweb.utils import make_uid from cubicweb.rqlrewrite import RQLRewriter @@ -96,59 +99,75 @@ return obj.deny_all_hooks_but(*categories) -class _hooks_control(object): +class _hooks_control(object): # XXX repoapi: remove me when + # session stop being connection """context manager to control activated hooks categories. - If mode is session.`HOOKS_DENY_ALL`, given hooks categories will + If mode is`HOOKS_DENY_ALL`, given hooks categories will be enabled. - If mode is session.`HOOKS_ALLOW_ALL`, given hooks categories will + If mode is `HOOKS_ALLOW_ALL`, given hooks categories will be disabled. .. sourcecode:: python - with _hooks_control(self.session, self.session.HOOKS_ALLOW_ALL, 'integrity'): + with _hooks_control(cnx, HOOKS_ALLOW_ALL, 'integrity'): # ... do stuff with all but 'integrity' hooks activated - with _hooks_control(self.session, self.session.HOOKS_DENY_ALL, 'integrity'): + with _hooks_control(cnx, HOOKS_DENY_ALL, 'integrity'): # ... do stuff with none but 'integrity' hooks activated This is an internal api, you should rather use - :meth:`~cubicweb.server.session.Session.deny_all_hooks_but` or - :meth:`~cubicweb.server.session.Session.allow_all_hooks_but` session - methods. + :meth:`~cubicweb.server.session.Connection.deny_all_hooks_but` or + :meth:`~cubicweb.server.session.Connection.allow_all_hooks_but` + Transaction methods. 
""" - def __init__(self, session, mode, *categories): + def __init__(self, cnx, mode, *categories): assert mode in (HOOKS_ALLOW_ALL, HOOKS_DENY_ALL) - self.session = session - self.tx = session._tx + self.cnx = cnx self.mode = mode self.categories = categories self.oldmode = None self.changes = () def __enter__(self): - self.oldmode = self.tx.hooks_mode - self.tx.hooks_mode = self.mode + self.oldmode = self.cnx.hooks_mode + self.cnx.hooks_mode = self.mode if self.mode is HOOKS_DENY_ALL: - self.changes = self.tx.enable_hook_categories(*self.categories) + self.changes = self.cnx.enable_hook_categories(*self.categories) else: - self.changes = self.tx.disable_hook_categories(*self.categories) - self.tx.ctx_count += 1 + self.changes = self.cnx.disable_hook_categories(*self.categories) + self.cnx.ctx_count += 1 def __exit__(self, exctype, exc, traceback): - self.tx.ctx_count -= 1 - if self.tx.ctx_count == 0: - self.session._clear_thread_storage(self.tx) - else: - try: - if self.categories: - if self.mode is HOOKS_DENY_ALL: - self.tx.disable_hook_categories(*self.categories) - else: - self.tx.enable_hook_categories(*self.categories) - finally: - self.tx.hooks_mode = self.oldmode + self.cnx.ctx_count -= 1 + try: + if self.categories: + if self.mode is HOOKS_DENY_ALL: + self.cnx.disable_hook_categories(*self.categories) + else: + self.cnx.enable_hook_categories(*self.categories) + finally: + self.cnx.hooks_mode = self.oldmode + +class _session_hooks_control(_hooks_control): # XXX repoapi: remove me when + # session stop being connection + """hook control context manager for session + + Necessary to handle some unholy transaction scope logic.""" + + + def __init__(self, session, mode, *categories): + self.session = session + super_init = super(_session_hooks_control, self).__init__ + return super_init(session._cnx, mode, *categories) + + def __exit__(self, exctype, exc, traceback): + super_exit = super(_session_hooks_control, self).__exit__ + ret = super_exit(exctype, exc, 
traceback) + if self.cnx.ctx_count == 0: + self.session._close_cnx(self.cnx) + return ret @deprecated('[3.17] use <object>.security_enabled instead') def security_enabled(obj, *args, **kwargs): @@ -160,9 +179,8 @@ By default security is disabled on queries executed on the repository side. """ - def __init__(self, session, read=None, write=None): - self.session = session - self.tx = session._tx + def __init__(self, cnx, read=None, write=None): + self.cnx = cnx self.read = read self.write = write self.oldread = None @@ -172,24 +190,39 @@ if self.read is None: self.oldread = None else: - self.oldread = self.tx.read_security - self.tx.read_security = self.read + self.oldread = self.cnx.read_security + self.cnx.read_security = self.read if self.write is None: self.oldwrite = None else: - self.oldwrite = self.tx.write_security - self.tx.write_security = self.write - self.tx.ctx_count += 1 + self.oldwrite = self.cnx.write_security + self.cnx.write_security = self.write + self.cnx.ctx_count += 1 def __exit__(self, exctype, exc, traceback): - self.tx.ctx_count -= 1 - if self.tx.ctx_count == 0: - self.session._clear_thread_storage(self.tx) - else: - if self.oldread is not None: - self.tx.read_security = self.oldread - if self.oldwrite is not None: - self.tx.write_security = self.oldwrite + self.cnx.ctx_count -= 1 + if self.oldread is not None: + self.cnx.read_security = self.oldread + if self.oldwrite is not None: + self.cnx.write_security = self.oldwrite + +class _session_security_enabled(_security_enabled): + """hook security context manager for session + + Necessary To handle some unholy transaction scope logic.""" + + + def __init__(self, session, read=None, write=None): + self.session = session + super_init = super(_session_security_enabled, self).__init__ + return super_init(session._cnx, read=read, write=write) + + def __exit__(self, exctype, exc, traceback): + super_exit = super(_session_security_enabled, self).__exit__ + ret = super_exit(exctype, exc, traceback) + 
if self.cnx.ctx_count == 0: + self.session._close_cnx(self.cnx) + return ret HOOKS_ALLOW_ALL = object() HOOKS_DENY_ALL = object() @@ -199,13 +232,13 @@ pass class CnxSetTracker(object): - """Keep track of which transaction use which cnxset. + """Keep track of which connection use which cnxset. - There should be one of these object per session (including internal sessions). + There should be one of these objects per session (including internal sessions). - Session objects are responsible of creating their CnxSetTracker object. + Session objects are responsible for creating their CnxSetTracker object. - Transactions should use the :meth:`record` and :meth:`forget` to inform the + Connections should use the :meth:`record` and :meth:`forget` to inform the tracker of cnxsets they have acquired. .. automethod:: cubicweb.server.session.CnxSetTracker.record @@ -231,13 +264,13 @@ def __exit__(self, *args): return self._condition.__exit__(*args) - def record(self, txid, cnxset): - """Inform the tracker that a txid has acquired a cnxset + def record(self, cnxid, cnxset): + """Inform the tracker that a cnxid has acquired a cnxset - This method is to be used by Transaction objects. + This method is to be used by Connection objects. This method fails when: - - The txid already has a recorded cnxset. + - The cnxid already has a recorded cnxset. - The tracker is not active anymore. 
Notes about the caller: @@ -264,19 +297,19 @@ with self._condition: if not self._active: raise SessionClosedError('Closed') - old = self._record.get(txid) + old = self._record.get(cnxid) if old is not None: - raise ValueError('transaction "%s" already has a cnx_set (%r)' - % (txid, old)) - self._record[txid] = cnxset + raise ValueError('connection "%s" already has a cnx_set (%r)' + % (cnxid, old)) + self._record[cnxid] = cnxset - def forget(self, txid, cnxset): - """Inform the tracker that a txid have release a cnxset + def forget(self, cnxid, cnxset): + """Inform the tracker that a cnxid have release a cnxset - This methode is to be used by Transaction object. + This methode is to be used by Connection object. This method fails when: - - The cnxset for the txid does not match the recorded one. + - The cnxset for the cnxid does not match the recorded one. Notes about the caller: (1) It is responsible for releasing the cnxset. @@ -296,11 +329,11 @@ cnxset = repo._free_cnxset(cnxset) # (1) """ with self._condition: - old = self._record.get(txid, None) + old = self._record.get(cnxid, None) if old is not cnxset: raise ValueError('recorded cnxset for "%s" mismatch: %r != %r' - % (txid, old, cnxset)) - self._record.pop(txid) + % (cnxid, old, cnxset)) + self._record.pop(cnxid) self._condition.notify_all() def close(self): @@ -318,7 +351,7 @@ This method is to be used by Session objects. - Returns a tuple of transaction ids that remain open. + Returns a tuple of connection ids that remain open. 
""" with self._condition: if self._active: @@ -330,10 +363,29 @@ timeout -= time() - start return tuple(self._record) -class Transaction(object): - """Repository Transaction + +def _with_cnx_set(func): + """decorator for Connection method that ensure they run with a cnxset """ + @functools.wraps(func) + def wrapper(cnx, *args, **kwargs): + with cnx.ensure_cnx_set: + return func(cnx, *args, **kwargs) + return wrapper - Holds all transaction related data +def _open_only(func): + """decorator for Connection method that check it is open""" + @functools.wraps(func) + def check_open(cnx, *args, **kwargs): + if not cnx._open: + raise ProgrammingError('Closed Connection: %s' + % cnx.connectionid) + return func(cnx, *args, **kwargs) + return check_open + +class Connection(RequestSessionBase): + """Repository Connection + + Holds all connection related data Database connection resources: @@ -342,11 +394,11 @@ :attr:`cnxset`, the connections set to use to execute queries on sources. If the transaction is read only, the connection set may be freed between - actual queries. This allows multiple transactions with a reasonably low + actual queries. This allows multiple connections with a reasonably low connection set pool size. Control mechanism is detailed below. - .. automethod:: cubicweb.server.session.Transaction.set_cnxset - .. automethod:: cubicweb.server.session.Transaction.free_cnxset + .. automethod:: cubicweb.server.session.Connection.set_cnxset + .. 
automethod:: cubicweb.server.session.Connection.free_cnxset :attr:`mode`, string telling the connections set handling mode, may be one of 'read' (connections set may be freed), 'write' (some write was done in @@ -387,15 +439,39 @@ """ - def __init__(self, txid, session, rewriter): - #: transaction unique id - self.transactionid = txid + is_request = False + + def __init__(self, session, cnxid=None, session_handled=False): + # using super(Connection, self) confuse some test hack + RequestSessionBase.__init__(self, session.vreg) + # only the session provide explicite + if cnxid is not None: + assert session_handled # only session profive explicite cnxid + #: connection unique id + self._open = None + if cnxid is None: + cnxid = '%s-%s' % (session.id, uuid4().hex) + self.connectionid = cnxid + #: self._session_handled + #: are the life cycle of this Connection automatically controlled by the + #: Session This is the old backward compatibility mode + self._session_handled = session_handled #: reentrance handling self.ctx_count = 0 + #: count the number of entry in a context needing a cnxset + self._cnxset_count = 0 + #: Boolean for compat with the older explicite set_cnxset/free_cnx API + #: When a call set_cnxset is done, no automatic freeing will be done + #: until free_cnx is called. 
+ self._auto_free_cnx_set = True #: server.Repository object self.repo = session.repo self.vreg = self.repo.vreg + self._execute = self.repo.querier.execute + + # other session utility + self._session_timestamp = session._timestamp #: connection handling mode self.mode = session.default_mode @@ -403,11 +479,14 @@ self._cnxset = None #: CnxSetTracker used to report cnxset usage self._cnxset_tracker = session._cnxset_tracker - #: is this transaction from a client or internal to the repo + #: is this connection from a client or internal to the repo self.running_dbapi_query = True + # internal (root) session + self.is_internal_session = session.is_internal_session #: dict containing arbitrary data cleared at the end of the transaction - self.data = {} + self.transaction_data = {} + self._session_data = session.data #: ordered list of operations to be processed on commit/rollback self.pending_operations = [] #: (None, 'precommit', 'postcommit', 'uncommitable') @@ -432,27 +511,79 @@ self.undo_actions = config['undo-enabled'] # RQLRewriter are not thread safe - self._rewriter = rewriter + self._rewriter = RQLRewriter(self) + + # other session utility + if session.user.login == '__internal_manager__': + self.user = session.user + else: + self._set_user(session.user) + + + # live cycle handling #################################################### + + def __enter__(self): + assert self._open is None # first opening + self._open = True + return self + + def __exit__(self, exctype=None, excvalue=None, tb=None): + assert self._open # actually already open + self.free_cnxset(ignoremode=True) + self.clear() + self._open = False + + + + # shared data handling ################################################### @property - def transaction_data(self): - return self.data + def data(self): + return self._session_data + + @property + def rql_rewriter(self): + return self._rewriter + + @_open_only + def get_shared_data(self, key, default=None, pop=False, txdata=False): + """return value 
associated to `key` in session data""" + if txdata: + data = self.transaction_data + else: + data = self._session_data + if pop: + return data.pop(key, default) + else: + return data.get(key, default) + + @_open_only + def set_shared_data(self, key, value, txdata=False): + """set value associated to `key` in session data""" + if txdata: + self.transaction_data[key] = value + else: + self._session_data[key] = value def clear(self): """reset internal data""" - self.data = {} + self.transaction_data = {} #: ordered list of operations to be processed on commit/rollback self.pending_operations = [] #: (None, 'precommit', 'postcommit', 'uncommitable') self.commit_state = None self.pruned_hooks_cache = {} + self.local_perm_cache.clear() + self.rewriter = RQLRewriter(self) # Connection Set Management ############################################### @property + @_open_only def cnxset(self): return self._cnxset @cnxset.setter + @_open_only def cnxset(self, new_cnxset): with self._cnxset_tracker: old_cnxset = self._cnxset @@ -461,14 +592,15 @@ if old_cnxset is not None: self._cnxset = None self.ctx_count -= 1 - self._cnxset_tracker.forget(self.transactionid, old_cnxset) + self._cnxset_tracker.forget(self.connectionid, old_cnxset) if new_cnxset is not None: - self._cnxset_tracker.record(self.transactionid, new_cnxset) + self._cnxset_tracker.record(self.connectionid, new_cnxset) self._cnxset = new_cnxset self.ctx_count += 1 - def set_cnxset(self): - """the transaction need a connections set to execute some queries""" + @_open_only + def _set_cnxset(self): + """the connection need a connections set to execute some queries""" if self.cnxset is None: cnxset = self.repo._get_cnxset() try: @@ -483,8 +615,9 @@ raise return self.cnxset - def free_cnxset(self, ignoremode=False): - """the transaction is no longer using its connections set, at least for some time""" + @_open_only + def _free_cnxset(self, ignoremode=False): + """the connection is no longer using its connections set, at 
least for some time""" # cnxset may be none if no operation has been done since last commit # or rollback cnxset = self.cnxset @@ -495,55 +628,229 @@ cnxset.cnxset_freed() self.repo._free_cnxset(cnxset) + @deprecated('[4.0] cnxset are automatically managed now.' + ' stop using explicit set and free.') + def set_cnxset(self): + self._auto_free_cnx_set = False + return self._set_cnxset() + + @deprecated('[4.0] cnxset are automatically managed now.' + ' stop using explicit set and free.') + def free_cnxset(self, ignoremode=False): + self._auto_free_cnx_set = True + return self._free_cnxset(ignoremode=ignoremode) + + + @property + @contextmanager + @_open_only + def ensure_cnx_set(self): + assert self._cnxset_count >= 0 + if self._cnxset_count == 0: + self._set_cnxset() + try: + self._cnxset_count += 1 + yield + finally: + self._cnxset_count = max(self._cnxset_count - 1, 0) + if self._cnxset_count == 0 and self._auto_free_cnx_set: + self._free_cnxset() + # Entity cache management ################################################# # - # The transaction entity cache as held in tx.data is removed at the - # end of the transaction (commit and rollback) + # The connection entity cache as held in cnx.transaction_data is removed at the + # end of the connection (commit and rollback) # - # XXX transaction level caching may be a pb with multiple repository + # XXX connection level caching may be a pb with multiple repository # instances, but 1. this is probably not the only one :$ and 2. it may be # an acceptable risk. Anyway we could activate it or not according to a # configuration option def set_entity_cache(self, entity): - """Add `entity` to the transaction entity cache""" - ecache = self.data.setdefault('ecache', {}) + """Add `entity` to the connection entity cache""" + # XXX not using _open_only because before at creation time. _set_user + # call this function to cache the Connection user. 
+ if entity.cw_etype != 'CWUser' and not self._open: + raise ProgrammingError('Closed Connection: %s' + % self.connectionid) + ecache = self.transaction_data.setdefault('ecache', {}) ecache.setdefault(entity.eid, entity) + @_open_only def entity_cache(self, eid): """get cache entity for `eid`""" - return self.data['ecache'][eid] + return self.transaction_data['ecache'][eid] + @_open_only def cached_entities(self): """return the whole entity cache""" - return self.data.get('ecache', {}).values() + return self.transaction_data.get('ecache', {}).values() + @_open_only def drop_entity_cache(self, eid=None): """drop entity from the cache If eid is None, the whole cache is dropped""" if eid is None: - self.data.pop('ecache', None) + self.transaction_data.pop('ecache', None) else: - del self.data['ecache'][eid] + del self.transaction_data['ecache'][eid] + + # relations handling ####################################################### + + @_open_only + def add_relation(self, fromeid, rtype, toeid): + """provide direct access to the repository method to add a relation. + + This is equivalent to the following rql query: + + SET X rtype Y WHERE X eid fromeid, T eid toeid + + without read security check but also all the burden of rql execution. + You may use this in hooks when you know both eids of the relation you + want to add. 
+ """ + self.add_relations([(rtype, [(fromeid, toeid)])]) + + @_open_only + def add_relations(self, relations): + '''set many relation using a shortcut similar to the one in add_relation + + relations is a list of 2-uples, the first element of each + 2-uple is the rtype, and the second is a list of (fromeid, + toeid) tuples + ''' + edited_entities = {} + relations_dict = {} + with self.security_enabled(False, False): + for rtype, eids in relations: + if self.vreg.schema[rtype].inlined: + for fromeid, toeid in eids: + if fromeid not in edited_entities: + entity = self.entity_from_eid(fromeid) + edited = EditedEntity(entity) + edited_entities[fromeid] = edited + else: + edited = edited_entities[fromeid] + edited.edited_attribute(rtype, toeid) + else: + relations_dict[rtype] = eids + self.repo.glob_add_relations(self, relations_dict) + for edited in edited_entities.itervalues(): + self.repo.glob_update_entity(self, edited) + + + @_open_only + def delete_relation(self, fromeid, rtype, toeid): + """provide direct access to the repository method to delete a relation. + + This is equivalent to the following rql query: + + DELETE X rtype Y WHERE X eid fromeid, T eid toeid + + without read security check but also all the burden of rql execution. + You may use this in hooks when you know both eids of the relation you + want to delete. 
+ """ + with self.security_enabled(False, False): + if self.vreg.schema[rtype].inlined: + entity = self.entity_from_eid(fromeid) + entity.cw_attr_cache[rtype] = None + self.repo.glob_update_entity(self, entity, set((rtype,))) + else: + self.repo.glob_delete_relation(self, fromeid, rtype, toeid) + + # relations cache handling ################################################# + + @_open_only + def update_rel_cache_add(self, subject, rtype, object, symmetric=False): + self._update_entity_rel_cache_add(subject, rtype, 'subject', object) + if symmetric: + self._update_entity_rel_cache_add(object, rtype, 'subject', subject) + else: + self._update_entity_rel_cache_add(object, rtype, 'object', subject) + + @_open_only + def update_rel_cache_del(self, subject, rtype, object, symmetric=False): + self._update_entity_rel_cache_del(subject, rtype, 'subject', object) + if symmetric: + self._update_entity_rel_cache_del(object, rtype, 'object', object) + else: + self._update_entity_rel_cache_del(object, rtype, 'object', subject) + + @_open_only + def _update_entity_rel_cache_add(self, eid, rtype, role, targeteid): + try: + entity = self.entity_cache(eid) + except KeyError: + return + rcache = entity.cw_relation_cached(rtype, role) + if rcache is not None: + rset, entities = rcache + rset = rset.copy() + entities = list(entities) + rset.rows.append([targeteid]) + if not isinstance(rset.description, list): # else description not set + rset.description = list(rset.description) + rset.description.append([self.describe(targeteid)[0]]) + targetentity = self.entity_from_eid(targeteid) + if targetentity.cw_rset is None: + targetentity.cw_rset = rset + targetentity.cw_row = rset.rowcount + targetentity.cw_col = 0 + rset.rowcount += 1 + entities.append(targetentity) + entity._cw_related_cache['%s_%s' % (rtype, role)] = ( + rset, tuple(entities)) + + @_open_only + def _update_entity_rel_cache_del(self, eid, rtype, role, targeteid): + try: + entity = self.entity_cache(eid) + except KeyError: 
+ return + rcache = entity.cw_relation_cached(rtype, role) + if rcache is not None: + rset, entities = rcache + for idx, row in enumerate(rset.rows): + if row[0] == targeteid: + break + else: + # this may occurs if the cache has been filed by a hook + # after the database update + self.debug('cache inconsistency for %s %s %s %s', eid, rtype, + role, targeteid) + return + rset = rset.copy() + entities = list(entities) + del rset.rows[idx] + if isinstance(rset.description, list): # else description not set + del rset.description[idx] + del entities[idx] + rset.rowcount -= 1 + entity._cw_related_cache['%s_%s' % (rtype, role)] = ( + rset, tuple(entities)) # Tracking of entities added of removed in the transaction ################## + @_open_only def deleted_in_transaction(self, eid): """return True if the entity of the given eid is being deleted in the current transaction """ - return eid in self.data.get('pendingeids', ()) + return eid in self.transaction_data.get('pendingeids', ()) + @_open_only def added_in_transaction(self, eid): """return True if the entity of the given eid is being created in the current transaction """ - return eid in self.data.get('neweids', ()) + return eid in self.transaction_data.get('neweids', ()) # Operation management #################################################### + @_open_only def add_operation(self, operation, index=None): """add an operation to be executed at the end of the transaction""" if index is None: @@ -553,6 +860,15 @@ # Hooks control ########################################################### + @_open_only + def allow_all_hooks_but(self, *categories): + return _hooks_control(self, HOOKS_ALLOW_ALL, *categories) + + @_open_only + def deny_all_hooks_but(self, *categories): + return _hooks_control(self, HOOKS_DENY_ALL, *categories) + + @_open_only def disable_hook_categories(self, *categories): """disable the given hook categories: @@ -572,6 +888,7 @@ disabledcats |= changes # changes is small hence faster return 
tuple(changes) + @_open_only def enable_hook_categories(self, *categories): """enable the given hook categories: @@ -591,6 +908,7 @@ disabledcats -= changes # changes is small hence faster return tuple(changes) + @_open_only def is_hook_category_activated(self, category): """return a boolean telling if the given category is currently activated or not @@ -599,6 +917,7 @@ return category in self.enabled_hook_cats return category not in self.disabled_hook_cats + @_open_only def is_hook_activated(self, hook): """return a boolean telling if the given hook class is currently activated or not @@ -606,11 +925,18 @@ return self.is_hook_category_activated(hook.category) # Security management ##################################################### + + @_open_only + def security_enabled(self, read=None, write=None): + return _security_enabled(self, read=read, write=write) + @property + @_open_only def read_security(self): return self._read_security @read_security.setter + @_open_only def read_security(self, activated): oldmode = self._read_security self._read_security = activated @@ -636,25 +962,32 @@ # undo support ############################################################ + @_open_only def ertype_supports_undo(self, ertype): return self.undo_actions and ertype not in NO_UNDO_TYPES + @_open_only def transaction_uuid(self, set=True): - uuid = self.data.get('tx_uuid') + uuid = self.transaction_data.get('tx_uuid') if set and uuid is None: - raise KeyError + self.transaction_data['tx_uuid'] = uuid = uuid4().hex + self.repo.system_source.start_undoable_transaction(self, uuid) return uuid + @_open_only def transaction_inc_action_counter(self): - num = self.data.setdefault('tx_action_count', 0) + 1 - self.data['tx_action_count'] = num + num = self.transaction_data.setdefault('tx_action_count', 0) + 1 + self.transaction_data['tx_action_count'] = num return num # db-api like interface ################################################### + @_open_only def source_defs(self): return 
self.repo.source_defs() + @_with_cnx_set + @_open_only def describe(self, eid, asdict=False): """return a tuple (type, sourceuri, extid) for the entity with id <eid>""" metas = self.repo.type_and_source_from_eid(eid, self) @@ -663,12 +996,170 @@ # XXX :-1 for cw compat, use asdict=True for full information return metas[:-1] + @_with_cnx_set + @_open_only def source_from_eid(self, eid): """return the source where the entity with id <eid> is located""" return self.repo.source_from_eid(eid, self) + # core method ############################################################# + + @_with_cnx_set + @_open_only + def execute(self, rql, kwargs=None, eid_key=None, build_descr=True): + """db-api like method directly linked to the querier execute method. + + See :meth:`cubicweb.dbapi.Cursor.execute` documentation. + """ + self._session_timestamp.touch() + if eid_key is not None: + warn('[3.8] eid_key is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) + rset = self._execute(self, rql, kwargs, build_descr) + rset.req = self + self._session_timestamp.touch() + return rset + + @_open_only + def rollback(self, free_cnxset=True, reset_pool=None): + """rollback the current transaction""" + if reset_pool is not None: + warn('[3.13] use free_cnxset argument instead for reset_pool', + DeprecationWarning, stacklevel=2) + free_cnxset = reset_pool + cnxset = self.cnxset + if cnxset is None: + self.clear() + self._session_timestamp.touch() + self.debug('rollback transaction %s done (no db activity)', self.connectionid) + return + try: + # by default, operations are executed with security turned off + with self.security_enabled(False, False): + while self.pending_operations: + try: + operation = self.pending_operations.pop(0) + operation.handle_event('rollback_event') + except BaseException: + self.critical('rollback error', exc_info=sys.exc_info()) + continue + cnxset.rollback() + self.debug('rollback for transaction %s done', self.connectionid) + 
finally: + self._session_timestamp.touch() + if free_cnxset: + self.free_cnxset(ignoremode=True) + self.clear() + + @_open_only + def commit(self, free_cnxset=True, reset_pool=None): + """commit the current session's transaction""" + if reset_pool is not None: + warn('[3.13] use free_cnxset argument instead for reset_pool', + DeprecationWarning, stacklevel=2) + free_cnxset = reset_pool + if self.cnxset is None: + assert not self.pending_operations + self.clear() + self._session_timestamp.touch() + self.debug('commit transaction %s done (no db activity)', self.connectionid) + return + cstate = self.commit_state + if cstate == 'uncommitable': + raise QueryError('transaction must be rollbacked') + if cstate is not None: + return + # on rollback, an operation should have the following state + # information: + # - processed by the precommit/commit event or not + # - if processed, is it the failed operation + debug = server.DEBUG & server.DBG_OPS + try: + # by default, operations are executed with security turned off + with self.security_enabled(False, False): + processed = [] + self.commit_state = 'precommit' + if debug: + print self.commit_state, '*' * 20 + try: + while self.pending_operations: + operation = self.pending_operations.pop(0) + operation.processed = 'precommit' + processed.append(operation) + if debug: + print operation + operation.handle_event('precommit_event') + self.pending_operations[:] = processed + self.debug('precommit transaction %s done', self.connectionid) + except BaseException: + # if error on [pre]commit: + # + # * set .failed = True on the operation causing the failure + # * call revert<event>_event on processed operations + # * call rollback_event on *all* operations + # + # that seems more natural than not calling rollback_event + # for processed operations, and allow generic rollback + # instead of having to implements rollback, revertprecommit + # and revertcommit, that will be enough in mont case. 
+ operation.failed = True + if debug: + print self.commit_state, '*' * 20 + for operation in reversed(processed): + if debug: + print operation + try: + operation.handle_event('revertprecommit_event') + except BaseException: + self.critical('error while reverting precommit', + exc_info=True) + # XXX use slice notation since self.pending_operations is a + # read-only property. + self.pending_operations[:] = processed + self.pending_operations + self.rollback(free_cnxset) + raise + self.cnxset.commit() + self.commit_state = 'postcommit' + if debug: + print self.commit_state, '*' * 20 + while self.pending_operations: + operation = self.pending_operations.pop(0) + if debug: + print operation + operation.processed = 'postcommit' + try: + operation.handle_event('postcommit_event') + except BaseException: + self.critical('error while postcommit', + exc_info=sys.exc_info()) + self.debug('postcommit transaction %s done', self.connectionid) + return self.transaction_uuid(set=False) + finally: + self._session_timestamp.touch() + if free_cnxset: + self.free_cnxset(ignoremode=True) + self.clear() + # resource accessors ###################################################### + @_with_cnx_set + @_open_only + def call_service(self, regid, **kwargs): + json.dumps(kwargs) # This line ensure that people use serialisable + # argument for call service. this is very important + # to enforce that from start to make sure RPC + # version is available. + self.info('calling service %s', regid) + service = self.vreg['services'].select(regid, self, **kwargs) + result = service.call(**kwargs) + json.dumps(result) # This line ensure that service have serialisable + # output. this is very important to enforce that + # from start to make sure RPC version is + # available. 
+ return result + + @_with_cnx_set + @_open_only def system_sql(self, sql, args=None, rollback_on_failure=True): """return a sql cursor on the system database""" if sql.split(None, 1)[0].upper() != 'SELECT': @@ -683,6 +1174,7 @@ self.cnxset.reconnect(source) return source.doexec(self, sql, args, rollback=rollback_on_failure) + @_open_only def rtype_eids_rdef(self, rtype, eidfrom, eidto): # use type_and_source_from_eid instead of type_from_eid for optimization # (avoid two extra methods call) @@ -691,31 +1183,49 @@ return self.vreg.schema.rschema(rtype).rdefs[(subjtype, objtype)] -def tx_attr(attr_name, writable=False): - """return a property to forward attribute access to transaction. +def cnx_attr(attr_name, writable=False): + """return a property to forward attribute access to connection. This is to be used by session""" args = {} - def attr_from_tx(session): - return getattr(session._tx, attr_name) - args['fget'] = attr_from_tx + @deprecated('[4.0] use a Connection object instead') + def attr_from_cnx(session): + return getattr(session._cnx, attr_name) + args['fget'] = attr_from_cnx if writable: + @deprecated('[4.0] use a Connection object instead') def write_attr(session, value): - return setattr(session._tx, attr_name, value) + return setattr(session._cnx, attr_name, value) args['fset'] = write_attr return property(**args) -def tx_meth(meth_name): - """return a function forwarding calls to transaction. +def cnx_meth(meth_name): + """return a function forwarding calls to connection. 
This is to be used by session""" - def meth_from_tx(session, *args, **kwargs): - return getattr(session._tx, meth_name)(*args, **kwargs) - meth_from_tx.__doc__ = getattr(Transaction, meth_name).__doc__ - return meth_from_tx + @deprecated('[4.0] use a Connection object instead') + def meth_from_cnx(session, *args, **kwargs): + result = getattr(session._cnx, meth_name)(*args, **kwargs) + if getattr(result, '_cw', None) is not None: + result._cw = session + return result + meth_from_cnx.__doc__ = getattr(Connection, meth_name).__doc__ + return meth_from_cnx + +class Timestamp(object): + + def __init__(self): + self.value = time() + + def touch(self): + self.value = time() + + def __float__(self): + return float(self.value) -class Session(RequestSessionBase): +class Session(RequestSessionBase): # XXX repoapi: stop being a + # RequestSessionBase at some point """Repository user session This ties all together: @@ -733,23 +1243,23 @@ :attr:`data` is a dictionary containing shared data, used to communicate extra information between the client and the repository - :attr:`_txs` is a dictionary of :class:`TransactionData` instance, one - for each running transaction. The key is the transaction id. By default - the transaction id is the thread name but it can be otherwise (per dbapi + :attr:`_cnxs` is a dictionary of :class:`Connection` instance, one + for each running connection. The key is the connection id. By default + the connection id is the thread name but it can be otherwise (per dbapi cursor for instance, or per thread name *from another process*). - :attr:`__threaddata` is a thread local storage whose `tx` attribute - refers to the proper instance of :class:`Transaction` according to the - transaction. + :attr:`__threaddata` is a thread local storage whose `cnx` attribute + refers to the proper instance of :class:`Connection` according to the + connection. 
- You should not have to use neither :attr:`_tx` nor :attr:`__threaddata`, - simply access transaction data transparently through the :attr:`_tx` + You should not have to use neither :attr:`_cnx` nor :attr:`__threaddata`, + simply access connection data transparently through the :attr:`_cnx` property. Also, you usually don't have to access it directly since current - transaction's data may be accessed/modified through properties / methods: + connection's data may be accessed/modified through properties / methods: - :attr:`transaction_data`, similarly to :attr:`data`, is a dictionary + :attr:`connection_data`, similarly to :attr:`data`, is a dictionary containing some shared data that should be cleared at the end of the - transaction. Hooks and operations may put arbitrary data in there, and + connection. Hooks and operations may put arbitrary data in there, and this may also be used as a communication channel between the client and the repository. @@ -758,7 +1268,7 @@ .. automethod:: cubicweb.server.session.Session.added_in_transaction .. automethod:: cubicweb.server.session.Session.deleted_in_transaction - Transaction state information: + Connection state information: :attr:`running_dbapi_query`, boolean flag telling if the executing query is coming from a dbapi connection or is a query from within the repository @@ -831,10 +1341,10 @@ def __init__(self, user, repo, cnxprops=None, _id=None): super(Session, self).__init__(repo.vreg) - self.id = _id or make_uid(unormalize(user.login).encode('UTF8')) - self.user = user + self.sessionid = _id or make_uid(unormalize(user.login).encode('UTF8')) + self.user = user # XXX repoapi: deprecated and store only a login. 
self.repo = repo - self.timestamp = time() + self._timestamp = Timestamp() self.default_mode = 'read' # short cut to querier .execute method self._execute = repo.querier.execute @@ -844,10 +1354,12 @@ # i18n initialization self.set_language(user.prefered_language()) ### internals - # Transaction of this section - self._txs = {} + # Connection of this section + self._cnxs = {} # XXX repoapi: remove this when nobody use the session + # as a Connection # Data local to the thread - self.__threaddata = threading.local() + self.__threaddata = threading.local() # XXX repoapi: remove this when + # nobody use the session as a Connection self._cnxset_tracker = CnxSetTracker() self._closed = False self._lock = threading.RLock() @@ -855,42 +1367,80 @@ def __unicode__(self): return '<session %s (%s 0x%x)>' % ( unicode(self.user.login), self.id, id(self)) + @property + def timestamp(self): + return float(self._timestamp) - def get_tx(self, txid): - """return the <txid> transaction attached to this session + @property + @deprecated('[4.0] session.id is deprecated. use session.sessionid') + def id(self): + return self.sessionid + + @property + def login(self): + return self.user.login - Transaction is created if necessary""" - with self._lock: # no transaction exist with the same id + def new_cnx(self): + """Return a new Connection object linked to the session + + The returned Connection will *not* be managed by the Session. 
+ """ + return Connection(self) + + def _get_cnx(self, cnxid): + """return the <cnxid> connection attached to this session + + Connection is created if necessary""" + with self._lock: # no connection exist with the same id try: if self.closed: - raise SessionClosedError('try to access connections set on a closed session %s' % self.id) - tx = self._txs[txid] + raise SessionClosedError('try to access connections set on' + ' a closed session %s' % self.id) + cnx = self._cnxs[cnxid] + assert cnx._session_handled except KeyError: - rewriter = RQLRewriter(self) - tx = Transaction(txid, self, rewriter) - self._txs[txid] = tx - return tx + cnx = Connection(self, cnxid=cnxid, session_handled=True) + self._cnxs[cnxid] = cnx + cnx.__enter__() + return cnx - def set_tx(self, txid=None): - """set the default transaction of the current thread to <txid> + def _close_cnx(self, cnx): + """Close a Connection related to a session""" + assert cnx._session_handled + cnx.__exit__() + self._cnxs.pop(cnx.connectionid, None) + try: + if self.__threaddata.cnx is cnx: + del self.__threaddata.cnx + except AttributeError: + pass - Transaction is created if necessary""" - if txid is None: - txid = threading.currentThread().getName() - self.__threaddata.tx = self.get_tx(txid) + def set_cnx(self, cnxid=None): + # XXX repoapi: remove this when nobody use the session as a Connection + """set the default connection of the current thread to <cnxid> + + Connection is created if necessary""" + if cnxid is None: + cnxid = threading.currentThread().getName() + cnx = self._get_cnx(cnxid) + # New style session should not be accesed through the session. 
+ assert cnx._session_handled + self.__threaddata.cnx = cnx @property - def _tx(self): - """default transaction for current session in current thread""" + def _cnx(self): + """default connection for current session in current thread""" try: - return self.__threaddata.tx + return self.__threaddata.cnx except AttributeError: - self.set_tx() - return self.__threaddata.tx + self.set_cnx() + return self.__threaddata.cnx + @deprecated('[4.0] use a Connection object instead') def get_option_value(self, option, foreid=None): return self.repo.get_option_value(option, foreid) + @deprecated('[4.0] use a Connection object instead') def transaction(self, free_cnxset=True): """return context manager to enter a transaction for the session: when exiting the `with` block on exception, call `session.rollback()`, else @@ -901,184 +1451,55 @@ """ return transaction(self, free_cnxset) - - @deprecated('[3.17] do not use hijack_user. create new Session object') - def hijack_user(self, user): - """return a fake request/session using specified user""" - session = Session(user, self.repo) - tx = session._tx - tx.cnxset = self.cnxset - # share pending_operations, else operation added in the hi-jacked - # session such as SendMailOp won't ever be processed - tx.pending_operations = self.pending_operations - # everything in tx.data should be copied back but the entity - # type cache we don't want to avoid security pb - tx.data = self._tx.data.copy() - tx.data.pop('ecache', None) - return session - - def add_relation(self, fromeid, rtype, toeid): - """provide direct access to the repository method to add a relation. - - This is equivalent to the following rql query: - - SET X rtype Y WHERE X eid fromeid, T eid toeid - - without read security check but also all the burden of rql execution. - You may use this in hooks when you know both eids of the relation you - want to add. 
- """ - self.add_relations([(rtype, [(fromeid, toeid)])]) - - def add_relations(self, relations): - '''set many relation using a shortcut similar to the one in add_relation - - relations is a list of 2-uples, the first element of each - 2-uple is the rtype, and the second is a list of (fromeid, - toeid) tuples - ''' - edited_entities = {} - relations_dict = {} - with self.security_enabled(False, False): - for rtype, eids in relations: - if self.vreg.schema[rtype].inlined: - for fromeid, toeid in eids: - if fromeid not in edited_entities: - entity = self.entity_from_eid(fromeid) - edited = EditedEntity(entity) - edited_entities[fromeid] = edited - else: - edited = edited_entities[fromeid] - edited.edited_attribute(rtype, toeid) - else: - relations_dict[rtype] = eids - self.repo.glob_add_relations(self, relations_dict) - for edited in edited_entities.itervalues(): - self.repo.glob_update_entity(self, edited) - - - def delete_relation(self, fromeid, rtype, toeid): - """provide direct access to the repository method to delete a relation. - - This is equivalent to the following rql query: - - DELETE X rtype Y WHERE X eid fromeid, T eid toeid - - without read security check but also all the burden of rql execution. - You may use this in hooks when you know both eids of the relation you - want to delete. 
- """ - with self.security_enabled(False, False): - if self.vreg.schema[rtype].inlined: - entity = self.entity_from_eid(fromeid) - entity.cw_attr_cache[rtype] = None - self.repo.glob_update_entity(self, entity, set((rtype,))) - else: - self.repo.glob_delete_relation(self, fromeid, rtype, toeid) + add_relation = cnx_meth('add_relation') + add_relations = cnx_meth('add_relations') + delete_relation = cnx_meth('delete_relation') # relations cache handling ################################################# - def update_rel_cache_add(self, subject, rtype, object, symmetric=False): - self._update_entity_rel_cache_add(subject, rtype, 'subject', object) - if symmetric: - self._update_entity_rel_cache_add(object, rtype, 'subject', subject) - else: - self._update_entity_rel_cache_add(object, rtype, 'object', subject) - - def update_rel_cache_del(self, subject, rtype, object, symmetric=False): - self._update_entity_rel_cache_del(subject, rtype, 'subject', object) - if symmetric: - self._update_entity_rel_cache_del(object, rtype, 'object', object) - else: - self._update_entity_rel_cache_del(object, rtype, 'object', subject) - - def _update_entity_rel_cache_add(self, eid, rtype, role, targeteid): - try: - entity = self.entity_cache(eid) - except KeyError: - return - rcache = entity.cw_relation_cached(rtype, role) - if rcache is not None: - rset, entities = rcache - rset = rset.copy() - entities = list(entities) - rset.rows.append([targeteid]) - if not isinstance(rset.description, list): # else description not set - rset.description = list(rset.description) - rset.description.append([self.describe(targeteid)[0]]) - targetentity = self.entity_from_eid(targeteid) - if targetentity.cw_rset is None: - targetentity.cw_rset = rset - targetentity.cw_row = rset.rowcount - targetentity.cw_col = 0 - rset.rowcount += 1 - entities.append(targetentity) - entity._cw_related_cache['%s_%s' % (rtype, role)] = ( - rset, tuple(entities)) - - def _update_entity_rel_cache_del(self, eid, rtype, role, 
targeteid): - try: - entity = self.entity_cache(eid) - except KeyError: - return - rcache = entity.cw_relation_cached(rtype, role) - if rcache is not None: - rset, entities = rcache - for idx, row in enumerate(rset.rows): - if row[0] == targeteid: - break - else: - # this may occurs if the cache has been filed by a hook - # after the database update - self.debug('cache inconsistency for %s %s %s %s', eid, rtype, - role, targeteid) - return - rset = rset.copy() - entities = list(entities) - del rset.rows[idx] - if isinstance(rset.description, list): # else description not set - del rset.description[idx] - del entities[idx] - rset.rowcount -= 1 - entity._cw_related_cache['%s_%s' % (rtype, role)] = ( - rset, tuple(entities)) + update_rel_cache_add = cnx_meth('update_rel_cache_add') + update_rel_cache_del = cnx_meth('update_rel_cache_del') # resource accessors ###################################################### - system_sql = tx_meth('system_sql') - deleted_in_transaction = tx_meth('deleted_in_transaction') - added_in_transaction = tx_meth('added_in_transaction') - rtype_eids_rdef = tx_meth('rtype_eids_rdef') + system_sql = cnx_meth('system_sql') + deleted_in_transaction = cnx_meth('deleted_in_transaction') + added_in_transaction = cnx_meth('added_in_transaction') + rtype_eids_rdef = cnx_meth('rtype_eids_rdef') # security control ######################################################### - + @deprecated('[4.0] use a Connection object instead') def security_enabled(self, read=None, write=None): - return _security_enabled(self, read=read, write=write) + return _session_security_enabled(self, read=read, write=write) - read_security = tx_attr('read_security', writable=True) - write_security = tx_attr('write_security', writable=True) - running_dbapi_query = tx_attr('running_dbapi_query') + read_security = cnx_attr('read_security', writable=True) + write_security = cnx_attr('write_security', writable=True) + running_dbapi_query = cnx_attr('running_dbapi_query') # hooks 
activation control ################################################# # all hooks should be activated during normal execution + + @deprecated('[4.0] use a Connection object instead') def allow_all_hooks_but(self, *categories): - return _hooks_control(self, HOOKS_ALLOW_ALL, *categories) + return _session_hooks_control(self, HOOKS_ALLOW_ALL, *categories) + @deprecated('[4.0] use a Connection object instead') def deny_all_hooks_but(self, *categories): - return _hooks_control(self, HOOKS_DENY_ALL, *categories) - - hooks_mode = tx_attr('hooks_mode') + return _session_hooks_control(self, HOOKS_DENY_ALL, *categories) - disabled_hook_categories = tx_attr('disabled_hook_cats') - enabled_hook_categories = tx_attr('enabled_hook_cats') - disable_hook_categories = tx_meth('disable_hook_categories') - enable_hook_categories = tx_meth('enable_hook_categories') - is_hook_category_activated = tx_meth('is_hook_category_activated') - is_hook_activated = tx_meth('is_hook_activated') + hooks_mode = cnx_attr('hooks_mode') + + disabled_hook_categories = cnx_attr('disabled_hook_cats') + enabled_hook_categories = cnx_attr('enabled_hook_cats') + disable_hook_categories = cnx_meth('disable_hook_categories') + enable_hook_categories = cnx_meth('enable_hook_categories') + is_hook_category_activated = cnx_meth('is_hook_category_activated') + is_hook_activated = cnx_meth('is_hook_activated') # connection management ################################################### + @deprecated('[4.0] use a Connection object instead') def keep_cnxset_mode(self, mode): """set `mode`, e.g. 
how the session will keep its connections set: @@ -1099,16 +1520,17 @@ else: # mode == 'write' self.default_mode = 'read' - mode = tx_attr('mode', writable=True) - commit_state = tx_attr('commit_state', writable=True) + mode = cnx_attr('mode', writable=True) + commit_state = cnx_attr('commit_state', writable=True) @property + @deprecated('[4.0] use a Connection object instead') def cnxset(self): """connections set, set according to transaction mode for each query""" if self._closed: self.free_cnxset(True) raise SessionClosedError('try to access connections set on a closed session %s' % self.id) - return self._tx.cnxset + return self._cnx.cnxset def set_cnxset(self): """the session need a connections set to execute some queries""" @@ -1116,20 +1538,27 @@ if self._closed: self.free_cnxset(True) raise SessionClosedError('try to set connections set on a closed session %s' % self.id) - return self._tx.set_cnxset() - free_cnxset = tx_meth('free_cnxset') + return self._cnx.set_cnxset() + free_cnxset = cnx_meth('free_cnxset') + ensure_cnx_set = cnx_attr('ensure_cnx_set') def _touch(self): """update latest session usage timestamp and reset mode to read""" - self.timestamp = time() - self.local_perm_cache.clear() # XXX simply move in tx.data, no? 
+ self._timestamp.touch() + + local_perm_cache = cnx_attr('local_perm_cache') + @local_perm_cache.setter + def local_perm_cache(self, value): + #base class assign an empty dict:-( + assert value == {} + pass # shared data handling ################################################### def get_shared_data(self, key, default=None, pop=False, txdata=False): """return value associated to `key` in session data""" if txdata: - data = self._tx.data + return self._cnx.get_shared_data(key, default, pop, txdata=True) else: data = self.data if pop: @@ -1140,47 +1569,39 @@ def set_shared_data(self, key, value, txdata=False): """set value associated to `key` in session data""" if txdata: - self._tx.data[key] = value + return self._cnx.set_shared_data(key, value, txdata=True) else: self.data[key] = value # server-side service call ################################################# - def call_service(self, regid, async=False, **kwargs): - return self.repo._call_service_with_session(self, regid, async, - **kwargs) - + call_service = cnx_meth('call_service') # request interface ####################################################### @property + @deprecated('[4.0] use a Connection object instead') def cursor(self): """return a rql cursor""" return self - set_entity_cache = tx_meth('set_entity_cache') - entity_cache = tx_meth('entity_cache') - cache_entities = tx_meth('cached_entities') - drop_entity_cache = tx_meth('drop_entity_cache') + set_entity_cache = cnx_meth('set_entity_cache') + entity_cache = cnx_meth('entity_cache') + cache_entities = cnx_meth('cached_entities') + drop_entity_cache = cnx_meth('drop_entity_cache') - def from_controller(self): - """return the id (string) of the controller issuing the request (no - sense here, always return 'view') - """ - return 'view' - - source_defs = tx_meth('source_defs') - describe = tx_meth('describe') - source_from_eid = tx_meth('source_from_eid') + source_defs = cnx_meth('source_defs') + describe = cnx_meth('describe') + source_from_eid 
= cnx_meth('source_from_eid') - def execute(self, rql, kwargs=None, build_descr=True): + @deprecated('[4.0] use a Connection object instead') + def execute(self, *args, **kwargs): """db-api like method directly linked to the querier execute method. See :meth:`cubicweb.dbapi.Cursor.execute` documentation. """ - self.timestamp = time() # update timestamp - rset = self._execute(self, rql, kwargs, build_descr) + rset = self._cnx.execute(*args, **kwargs) rset.req = self return rset @@ -1190,150 +1611,36 @@ by _touch """ try: - tx = self.__threaddata.tx + cnx = self.__threaddata.cnx except AttributeError: pass else: if free_cnxset: self.free_cnxset() - if tx.ctx_count == 0: - self._clear_thread_storage(tx) + if cnx.ctx_count == 0: + self._close_cnx(cnx) else: - self._clear_tx_storage(tx) + cnx.clear() else: - self._clear_tx_storage(tx) + cnx.clear() - def _clear_thread_storage(self, tx): - self._txs.pop(tx.transactionid, None) - try: - del self.__threaddata.tx - except AttributeError: - pass - - def _clear_tx_storage(self, tx): - tx.clear() - tx._rewriter = RQLRewriter(self) - + @deprecated('[4.0] use a Connection object instead') def commit(self, free_cnxset=True, reset_pool=None): """commit the current session's transaction""" - if reset_pool is not None: - warn('[3.13] use free_cnxset argument instead for reset_pool', - DeprecationWarning, stacklevel=2) - free_cnxset = reset_pool - if self.cnxset is None: - assert not self.pending_operations - self._clear_thread_data() - self._touch() - self.debug('commit session %s done (no db activity)', self.id) - return - cstate = self.commit_state + cstate = self._cnx.commit_state if cstate == 'uncommitable': raise QueryError('transaction must be rolled back') - if cstate is not None: - return - # on rollback, an operation should have the following state - # information: - # - processed by the precommit/commit event or not - # - if processed, is it the failed operation - debug = server.DEBUG & server.DBG_OPS try: - # by default, 
operations are executed with security turned off - with self.security_enabled(False, False): - processed = [] - self.commit_state = 'precommit' - if debug: - print self.commit_state, '*' * 20 - try: - while self.pending_operations: - operation = self.pending_operations.pop(0) - operation.processed = 'precommit' - processed.append(operation) - if debug: - print operation - operation.handle_event('precommit_event') - self.pending_operations[:] = processed - self.debug('precommit session %s done', self.id) - except BaseException: - # save exception context, it may be clutered below by - # exception in revert_* event - exc_info = sys.exc_info() - # if error on [pre]commit: - # - # * set .failed = True on the operation causing the failure - # * call revert<event>_event on processed operations - # * call rollback_event on *all* operations - # - # that seems more natural than not calling rollback_event - # for processed operations, and allow generic rollback - # instead of having to implements rollback, revertprecommit - # and revertcommit, that will be enough in mont case. - operation.failed = True - if debug: - print self.commit_state, '*' * 20 - for operation in reversed(processed): - if debug: - print operation - try: - operation.handle_event('revertprecommit_event') - except BaseException: - self.critical('error while reverting precommit', - exc_info=True) - # XXX use slice notation since self.pending_operations is a - # read-only property. 
- self.pending_operations[:] = processed + self.pending_operations - self.rollback(free_cnxset) - raise exc_info[0], exc_info[1], exc_info[2] - self.cnxset.commit() - self.commit_state = 'postcommit' - if debug: - print self.commit_state, '*' * 20 - while self.pending_operations: - operation = self.pending_operations.pop(0) - if debug: - print operation - operation.processed = 'postcommit' - try: - operation.handle_event('postcommit_event') - except BaseException: - self.critical('error while postcommit', - exc_info=sys.exc_info()) - self.debug('postcommit session %s done', self.id) - return self.transaction_uuid(set=False) + return self._cnx.commit(free_cnxset, reset_pool) finally: - self._touch() - if free_cnxset: - self.free_cnxset(ignoremode=True) self._clear_thread_data(free_cnxset) - def rollback(self, free_cnxset=True, reset_pool=None): + @deprecated('[4.0] use a Connection object instead') + def rollback(self, free_cnxset=True, **kwargs): """rollback the current session's transaction""" - if reset_pool is not None: - warn('[3.13] use free_cnxset argument instead for reset_pool', - DeprecationWarning, stacklevel=2) - free_cnxset = reset_pool - # don't use self.cnxset, rollback may be called with _closed == True - cnxset = self._tx.cnxset - if cnxset is None: - self._clear_thread_data() - self._touch() - self.debug('rollback session %s done (no db activity)', self.id) - return try: - # by default, operations are executed with security turned off - with self.security_enabled(False, False): - while self.pending_operations: - try: - operation = self.pending_operations.pop(0) - operation.handle_event('rollback_event') - except BaseException: - self.critical('rollback error', exc_info=sys.exc_info()) - continue - cnxset.rollback() - self.debug('rollback for session %s done', self.id) + return self._cnx.rollback(free_cnxset, **kwargs) finally: - self._touch() - if free_cnxset: - self.free_cnxset(ignoremode=True) self._clear_thread_data(free_cnxset) def close(self): 
@@ -1343,62 +1650,63 @@ self._closed = True tracker.close() self.rollback() - self.debug('waiting for open transaction of session: %s', self) + self.debug('waiting for open connection of session: %s', self) timeout = 10 pendings = tracker.wait(timeout) if pendings: - self.error('%i transaction still alive after 10 seconds, will close ' + self.error('%i connection still alive after 10 seconds, will close ' 'session anyway', len(pendings)) - for txid in pendings: - tx = self._txs.get(txid) - if tx is not None: - # drop tx.cnxset + for cnxid in pendings: + cnx = self._cnxs.get(cnxid) + if cnx is not None: + # drop cnx.cnxset with tracker: try: - cnxset = tx.cnxset + cnxset = cnx.cnxset if cnxset is None: continue - tx.cnxset = None + cnx.cnxset = None except RuntimeError: msg = 'issue while force free of cnxset in %s' - self.error(msg, tx) + self.error(msg, cnx) # cnxset.reconnect() do an hard reset of the cnxset # it force it to be freed cnxset.reconnect() self.repo._free_cnxset(cnxset) del self.__threaddata - del self._txs + del self._cnxs @property def closed(self): - return not hasattr(self, '_txs') + return not hasattr(self, '_cnxs') # transaction data/operations management ################################## - transaction_data = tx_attr('data') - pending_operations = tx_attr('pending_operations') - pruned_hooks_cache = tx_attr('pruned_hooks_cache') - add_operation = tx_meth('add_operation') + transaction_data = cnx_attr('transaction_data') + pending_operations = cnx_attr('pending_operations') + pruned_hooks_cache = cnx_attr('pruned_hooks_cache') + add_operation = cnx_meth('add_operation') # undo support ############################################################ - ertype_supports_undo = tx_meth('ertype_supports_undo') - transaction_inc_action_counter = tx_meth('transaction_inc_action_counter') - - def transaction_uuid(self, set=True): - try: - return self._tx.transaction_uuid(set=set) - except KeyError: - self._tx.data['tx_uuid'] = uuid = uuid4().hex - 
self.repo.system_source.start_undoable_transaction(self, uuid) - return uuid + ertype_supports_undo = cnx_meth('ertype_supports_undo') + transaction_inc_action_counter = cnx_meth('transaction_inc_action_counter') + transaction_uuid = cnx_meth('transaction_uuid') # querier helpers ######################################################### - rql_rewriter = tx_attr('_rewriter') + rql_rewriter = cnx_attr('_rewriter') # deprecated ############################################################### + @property + @deprecated('[4.0] use a Connection object instead') + def anonymous_session(self): + # XXX for now, anonymous-user is a web side option. + # It will only be present inside all-in-one instance. + # there is plan to move it down to global config. + return self.user.login == self.repo.config.get('anonymous-user') + @deprecated('[3.13] use getattr(session.rtype_eids_rdef(rtype, eidfrom, eidto), prop)') def schema_rproperty(self, rtype, eidfrom, eidto, rprop): return getattr(self.rtype_eids_rdef(rtype, eidfrom, eidto), rprop) @@ -1451,7 +1759,7 @@ if self.repo.shutting_down: self.free_cnxset(True) raise ShuttingDown('repository is shutting down') - return self._tx.cnxset + return self._cnx.cnxset class InternalManager(object): @@ -1463,6 +1771,7 @@ self.eid = -1 self.login = u'__internal_manager__' self.properties = {} + self.groups = set(['managers']) def matching_groups(self, groups): return 1 @@ -1500,3 +1809,4 @@ from logging import getLogger from cubicweb import set_log_methods set_log_methods(Session, getLogger('cubicweb.session')) +set_log_methods(Connection, getLogger('cubicweb.session'))
--- a/server/sources/native.py Fri Jan 10 18:31:07 2014 +0100 +++ b/server/sources/native.py Mon Jan 13 13:47:47 2014 +0100 @@ -972,34 +972,35 @@ def add_info(self, session, entity, source, extid, complete): """add type and source info for an eid into the system table""" - # begin by inserting eid/type/source/extid into the entities table - if extid is not None: - assert isinstance(extid, str) - extid = b64encode(extid) - uri = 'system' if source.copy_based_source else source.uri - attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid, - 'source': uri, 'asource': source.uri, 'mtime': datetime.utcnow()} - self._handle_insert_entity_sql(session, self.sqlgen.insert('entities', attrs), attrs) - # insert core relations: is, is_instance_of and cw_source - try: - self._handle_is_relation_sql(session, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, eschema_eid(session, entity.e_schema))) - except IndexError: - # during schema serialization, skip - pass - else: - for eschema in entity.e_schema.ancestors() + [entity.e_schema]: - self._handle_is_relation_sql(session, - 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, eschema_eid(session, eschema))) - if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10 - self._handle_is_relation_sql(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, source.eid)) - # now we can update the full text index - if self.do_fti and self.need_fti_indexation(entity.cw_etype): - if complete: - entity.complete(entity.e_schema.indexable_attributes()) - self.index_entity(session, entity=entity) + with session.ensure_cnx_set: + # begin by inserting eid/type/source/extid into the entities table + if extid is not None: + assert isinstance(extid, str) + extid = b64encode(extid) + uri = 'system' if source.copy_based_source else source.uri + attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid, + 'source': uri, 
'asource': source.uri, 'mtime': datetime.utcnow()} + self._handle_insert_entity_sql(session, self.sqlgen.insert('entities', attrs), attrs) + # insert core relations: is, is_instance_of and cw_source + try: + self._handle_is_relation_sql(session, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, eschema_eid(session, entity.e_schema))) + except IndexError: + # during schema serialization, skip + pass + else: + for eschema in entity.e_schema.ancestors() + [entity.e_schema]: + self._handle_is_relation_sql(session, + 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, eschema_eid(session, eschema))) + if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10 + self._handle_is_relation_sql(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, source.eid)) + # now we can update the full text index + if self.do_fti and self.need_fti_indexation(entity.cw_etype): + if complete: + entity.complete(entity.e_schema.indexable_attributes()) + self.index_entity(session, entity=entity) def update_info(self, session, entity, need_fti_update): """mark entity as being modified, fulltext reindex if needed""" @@ -1222,17 +1223,19 @@ raise `NoSuchTransaction` if there is no such transaction of if the session's user isn't allowed to see it. 
""" - restr = {'tx_uuid': txuuid} - sql = self.sqlgen.select('transactions', restr, ('tx_time', 'tx_user')) - cu = self.doexec(session, sql, restr) - try: - time, ueid = cu.fetchone() - except TypeError: - raise tx.NoSuchTransaction(txuuid) - if not (session.user.is_in_group('managers') - or session.user.eid == ueid): - raise tx.NoSuchTransaction(txuuid) - return time, ueid + with session.ensure_cnx_set: + restr = {'tx_uuid': txuuid} + sql = self.sqlgen.select('transactions', restr, + ('tx_time', 'tx_user')) + cu = self.doexec(session, sql, restr) + try: + time, ueid = cu.fetchone() + except TypeError: + raise tx.NoSuchTransaction(txuuid) + if not (session.user.is_in_group('managers') + or session.user.eid == ueid): + raise tx.NoSuchTransaction(txuuid) + return time, ueid def _reedit_entity(self, entity, changes, err): session = entity._cw
--- a/server/test/unittest_migractions.py Fri Jan 10 18:31:07 2014 +0100 +++ b/server/test/unittest_migractions.py Mon Jan 13 13:47:47 2014 +0100 @@ -45,19 +45,18 @@ tags = CubicWebTC.tags | Tags(('server', 'migration', 'migractions')) - @classmethod - def _init_repo(cls): - super(MigrationCommandsTC, cls)._init_repo() + def _init_repo(self): + super(MigrationCommandsTC, self)._init_repo() # we have to read schema from the database to get eid for schema entities - cls.repo.set_schema(cls.repo.deserialize_schema(), resetvreg=False) + self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False) # hack to read the schema from data/migrschema - config = cls.config + config = self.config config.appid = join('data', 'migratedapp') - config._apphome = cls.datapath('migratedapp') + config._apphome = self.datapath('migratedapp') global migrschema migrschema = config.load_schema() config.appid = 'data' - config._apphome = cls.datadir + config._apphome = self.datadir assert 'Folder' in migrschema def setUp(self): @@ -628,12 +627,12 @@ # # also we need more tests about introducing/removing base classes or # specialization relationship... - self.session.data['rebuild-infered'] = True + self.session.set_shared_data('rebuild-infered', True) try: self.session.execute('DELETE X specializes Y WHERE Y name "Para"') self.session.commit(free_cnxset=False) finally: - self.session.data['rebuild-infered'] = False + self.session.set_shared_data('rebuild-infered', False) self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), []) self.assertEqual(self.schema['Note'].specializes(), None)
--- a/server/test/unittest_msplanner.py Fri Jan 10 18:31:07 2014 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2809 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -"""unit tests for module cubicweb.server.msplanner""" - -from logilab.common.decorators import clear_cache -from yams.buildobjs import RelationDefinition -from rql import BadRQLQuery - -from cubicweb.devtools import get_test_db_handler, TestServerConfiguration -from cubicweb.devtools.repotest import BasePlannerTC, test_plan - -class _SetGenerator(object): - """singleton to easily create set using "s[0]" or "s[0,1,2]" for instance - """ - def __getitem__(self, key): - try: - it = iter(key) - except TypeError: - it = (key,) - return set(it) -s = _SetGenerator() - -from cubicweb.schema import ERQLExpression -from cubicweb.server.sources import AbstractSource -from cubicweb.server.msplanner import MSPlanner, PartPlanInformation - -class FakeUserROSource(AbstractSource): - support_entities = {'CWUser': False} - support_relations = {} - def syntax_tree_search(self, *args, **kwargs): - return [] - - -class FakeCardSource(AbstractSource): - support_entities = {'Card': True, 'Note': True, 'State': True} - support_relations = 
{'in_state': True, 'multisource_rel': True, 'multisource_inlined_rel': True, - 'multisource_crossed_rel': True,} - dont_cross_relations = set(('fiche', 'state_of')) - cross_relations = set(('multisource_crossed_rel',)) - - def syntax_tree_search(self, *args, **kwargs): - return [] - - -class FakeDataFeedSource(FakeCardSource): - copy_based_source = True - -X_ALL_SOLS = sorted([{'X': 'Affaire'}, {'X': 'BaseTransition'}, {'X': 'Basket'}, - {'X': 'Bookmark'}, {'X': 'CWAttribute'}, {'X': 'CWCache'}, - {'X': 'CWConstraint'}, {'X': 'CWConstraintType'}, {'X': 'CWDataImport'}, {'X': 'CWEType'}, - {'X': 'CWGroup'}, {'X': 'CWPermission'}, {'X': 'CWProperty'}, - {'X': 'CWRType'}, {'X': 'CWRelation'}, - {'X': 'CWSource'}, {'X': 'CWSourceHostConfig'}, {'X': 'CWSourceSchemaConfig'}, - {'X': 'CWUser'}, {'X': 'CWUniqueTogetherConstraint'}, - {'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, - {'X': 'Email'}, {'X': 'EmailAddress'}, {'X': 'EmailPart'}, - {'X': 'EmailThread'}, {'X': 'ExternalUri'}, {'X': 'File'}, - {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Old'}, - {'X': 'Personne'}, {'X': 'RQLExpression'}, {'X': 'Societe'}, - {'X': 'State'}, {'X': 'SubDivision'}, {'X': 'SubWorkflowExitPoint'}, - {'X': 'Tag'}, {'X': 'TrInfo'}, {'X': 'Transition'}, - {'X': 'Workflow'}, {'X': 'WorkflowTransition'}]) - - -# keep cnx so it's not garbage collected and the associated session is closed -def setUpModule(*args): - global repo, cnx - handler = get_test_db_handler(TestServerConfiguration(apphome=BaseMSPlannerTC.datadir)) - handler.build_db_cache() - repo, cnx = handler.get_repo_and_cnx() - -def tearDownModule(*args): - global repo, cnx - del repo, cnx - - -class BaseMSPlannerTC(BasePlannerTC): - """test planner related feature on a 3-sources repository: - - * system source supporting everything - * ldap source supporting CWUser - * rql source supporting Card - """ - - def setUp(self): - self.__class__.repo = repo - #_QuerierTC.setUp(self) - self.setup() - # hijack Affaire security - affreadperms 
= list(self.schema['Affaire'].permissions['read']) - self.prevrqlexpr_affaire = affreadperms[-1] - # add access to type attribute so S can't be invariant - affreadperms[-1] = ERQLExpression('X concerne S?, S owned_by U, S type "X"') - self.schema['Affaire'].set_action_permissions('read', affreadperms) - # hijack CWUser security - userreadperms = list(self.schema['CWUser'].permissions['read']) - self.prevrqlexpr_user = userreadperms[-1] - userreadperms[-1] = ERQLExpression('X owned_by U') - self.schema['CWUser'].set_action_permissions('read', userreadperms) - self.add_source(FakeUserROSource, 'ldap') - self.add_source(FakeCardSource, 'cards') - self.add_source(FakeDataFeedSource, 'datafeed') - - def tearDown(self): - # restore hijacked security - self.restore_orig_affaire_security() - self.restore_orig_cwuser_security() - super(BaseMSPlannerTC, self).tearDown() - - def restore_orig_affaire_security(self): - affreadperms = list(self.schema['Affaire'].permissions['read']) - affreadperms[-1] = self.prevrqlexpr_affaire - self.schema['Affaire'].set_action_permissions('read', affreadperms) - - def restore_orig_cwuser_security(self): - if hasattr(self, '_orig_cwuser_security_restored'): - return - self._orig_cwuser_security_restored = True - userreadperms = list(self.schema['CWUser'].permissions['read']) - userreadperms[-1] = self.prevrqlexpr_user - self.schema['CWUser'].set_action_permissions('read', userreadperms) - - -class PartPlanInformationTC(BaseMSPlannerTC): - - def _test(self, rql, *args): - if len(args) == 3: - kwargs, sourcesterms, needsplit = args - else: - sourcesterms, needsplit = args - kwargs = None - plan = self._prepare_plan(rql, kwargs) - union = plan.rqlst - plan.preprocess(union) - ppi = PartPlanInformation(plan, union.children[0]) - for sourcevars in ppi._sourcesterms.itervalues(): - for var in list(sourcevars): - solindices = sourcevars.pop(var) - sourcevars[var._ms_table_key()] = solindices - self.assertEqual(ppi._sourcesterms, sourcesterms) - 
self.assertEqual(ppi.needsplit, needsplit) - - - def test_simple_system_only(self): - """retrieve entities only supported by the system source""" - self._test('CWGroup X', - {self.system: {'X': s[0]}}, False) - - def test_simple_system_ldap(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - self._test('CWUser X', - {self.system: {'X': s[0]}, self.ldap: {'X': s[0]}}, False) - - def test_simple_system_rql(self): - """retrieve Card X from both sources and return concatenation of results - """ - self._test('Any X, XT WHERE X is Card, X title XT', - {self.system: {'X': s[0]}, self.cards: {'X': s[0]}}, False) - - def test_simple_eid_specified(self): - """retrieve CWUser X from system source (eid is specified, can locate the entity) - """ - ueid = self.session.user.eid - self._test('Any X,L WHERE X eid %(x)s, X login L', {'x': ueid}, - {self.system: {'X': s[0]}}, False) - - def test_simple_eid_invariant(self): - """retrieve CWUser X from system source (eid is specified, can locate the entity) - """ - ueid = self.session.user.eid - self._test('Any X WHERE X eid %(x)s', {'x': ueid}, - {self.system: {'x': s[0]}}, False) - - def test_simple_invariant(self): - """retrieve CWUser X from system source only (X is invariant and in_group not supported by ldap source) - """ - self._test('Any X WHERE X is CWUser, X in_group G, G name "users"', - {self.system: {'X': s[0], 'G': s[0], 'in_group': s[0]}}, False) - - def test_security_has_text(self): - """retrieve CWUser X from system source only (has_text not supported by ldap source) - """ - # specify CWUser instead of any since the way this test is written we aren't well dealing - # with ambigous query (eg only considering the first solution) - self._test('CWUser X WHERE X has_text "bla"', - {self.system: {'X': s[0]}}, False) - - def test_complex_base(self): - """ - 1. 
retrieve Any X, L WHERE X is CWUser, X login L from system and ldap sources, store - concatenation of results into a temporary table - 2. return the result of Any X, L WHERE X is TMP, X login L, X in_group G, - G name 'users' on the system source - """ - self._test('Any X,L WHERE X is CWUser, X in_group G, X login L, G name "users"', - {self.system: {'X': s[0], 'G': s[0], 'in_group': s[0]}, - self.ldap : {'X': s[0]}}, True) - - def test_complex_invariant_ordered(self): - """ - 1. retrieve Any X,AA WHERE X modification_date AA from system and ldap sources, store - concatenation of results into a temporary table - 2. return the result of Any X,AA ORDERBY AA WHERE %s owned_by X, X modification_date AA - on the system source - """ - ueid = self.session.user.eid - self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E owned_by X, X modification_date AA', {'x': ueid}, - {self.system: {'x': s[0], 'X': s[0], 'owned_by': s[0]}, - self.ldap : {'X': s[0]}}, True) - - def test_complex_invariant(self): - """ - 1. retrieve Any X,L,AA WHERE X login L, X modification_date AA from system and ldap sources, store - concatenation of results into a temporary table - 2. return the result of Any X,L,AA WHERE %s owned_by X, X login L, X modification_date AA - on the system source - """ - ueid = self.session.user.eid - self._test('Any X,L,AA WHERE E eid %(x)s, E owned_by X, X login L, X modification_date AA', {'x': ueid}, - {self.system: {'x': s[0], 'X': s[0], 'owned_by': s[0]}, - self.ldap : {'X': s[0]}}, True) - - def test_complex_ambigous(self): - """retrieve CWUser X from system and ldap sources, Person X from system source only - """ - self._test('Any X,F WHERE X firstname F', - {self.system: {'X': s[0, 1]}, - self.ldap: {'X': s[0]}}, True) - - def test_complex_multiple(self): - """ - 1. retrieve Any X,A,Y,B WHERE X login A, Y login B from system and ldap sources, store - cartesian product of results into a temporary table - 2. 
return the result of Any X,Y WHERE X login 'syt', Y login 'adim' - on the system source - """ - ueid = self.session.user.eid - self._test('Any X,Y WHERE X login "syt", Y login "adim"', {'x': ueid}, - {self.system: {'Y': s[0], 'X': s[0]}, - self.ldap: {'Y': s[0], 'X': s[0]}}, True) - - def test_complex_aggregat(self): - solindexes = set(range(len([e for e in self.schema.entities() if not e.final]))) - self._test('Any MAX(X)', - {self.system: {'X': solindexes}}, False) - - def test_complex_optional(self): - ueid = self.session.user.eid - self._test('Any U WHERE WF wf_info_for X, X eid %(x)s, WF owned_by U?, WF from_state FS', {'x': ueid}, - {self.system: {'WF': s[0], 'FS': s[0], 'U': s[0], - 'from_state': s[0], 'owned_by': s[0], 'wf_info_for': s[0], - 'x': s[0]}}, - False) - - def test_exists4(self): - """ - State S could come from both rql source and system source, - but since X cannot come from the rql source, the solution - {self.cards : 'S'} must be removed - """ - self._test('Any G,L WHERE X in_group G, X login L, G name "managers", ' - 'EXISTS(X copain T, T login L, T login in ("comme", "cochon")) OR ' - 'EXISTS(X in_state S, S name "pascontent", NOT X copain T2, T2 login "billy")', - {self.system: {'X': s[0], 'S': s[0], 'T2': s[0], 'T': s[0], 'G': s[0], 'copain': s[0], 'in_group': s[0]}, - self.ldap: {'X': s[0], 'T2': s[0], 'T': s[0]}}, - True) - - def test_relation_need_split(self): - self._test('Any X, S WHERE X in_state S', - {self.system: {'X': s[0, 1, 2], 'S': s[0, 1, 2]}, - self.cards: {'X': s[2], 'S': s[2]}}, - True) - - def test_not_relation_need_split(self): - self._test('Any SN WHERE NOT X in_state S, S name SN', - {self.cards: {'X': s[2], 'S': s[0, 1, 2]}, - self.system: {'X': s[0, 1, 2], 'S': s[0, 1, 2]}}, - True) - - def test_not_relation_no_split_external(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - # similar to the above test but with an eid coming from the external source. 
- # the same plan may be used, since we won't find any record in the system source - # linking 9999999 to a state - self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN', - {'x': 999999}, - {self.cards: {'x': s[0], 'S': s[0]}, - self.system: {'x': s[0], 'S': s[0]}}, - False) - - def test_relation_restriction_ambigous_need_split(self): - self._test('Any X,T WHERE X in_state S, S name "pending", T tags X', - {self.system: {'X': s[0, 1, 2], 'S': s[0, 1, 2], 'T': s[0, 1, 2], 'tags': s[0, 1, 2]}, - self.cards: {'X': s[2], 'S': s[2]}}, - True) - - def test_simplified_var(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - # need access to source since X table has to be accessed because of the outer join - self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s', - {'x': 999999, 'u': self.session.user.eid}, - {self.system: {'P': s[0], 'G': s[0], - 'require_permission': s[0], 'in_group': s[0], 'P': s[0], 'require_group': s[0], - 'u': s[0]}, - self.cards: {'X': s[0]}}, - True) - - def test_delete_relation1(self): - ueid = self.session.user.eid - self._test('Any X, Y WHERE X created_by Y, X eid %(x)s, NOT Y eid %(y)s', - {'x': ueid, 'y': ueid}, - {self.system: {'Y': s[0], 'created_by': s[0], 'x': s[0]}}, - False) - - def test_crossed_relation_eid_1_needattr(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - ueid = self.session.user.eid - self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T', - {'x': 999999,}, - {self.cards: {'Y': s[0]}, self.system: {'Y': s[0], 'x': s[0]}}, - True) - - def test_crossed_relation_eid_1_invariant(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y', - {'x': 999999}, - {self.system: {'Y': s[0], 'x': s[0]}}, - False) - - def 
test_crossed_relation_eid_2_invariant(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y', - {'x': 999999,}, - {self.cards: {'Y': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}, - self.system: {'Y': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}}, - False) - - def test_version_crossed_depends_on_1(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', - {'x': 999999}, - {self.cards: {'X': s[0], 'AD': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}, - self.system: {'X': s[0], 'AD': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}}, - True) - - def test_version_crossed_depends_on_2(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', - {'x': 999999}, - {self.cards: {'X': s[0], 'AD': s[0]}, - self.system: {'X': s[0], 'AD': s[0], 'x': s[0]}}, - True) - - def test_simplified_var_3(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'cards', 999998, 'cards') - self._test('Any S,T WHERE S eid %(s)s, N eid %(n)s, N type T, N is Note, S is State', - {'n': 999999, 's': 999998}, - {self.cards: {'s': s[0], 'N': s[0]}}, False) - - - -class MSPlannerTC(BaseMSPlannerTC): - - def setUp(self): - BaseMSPlannerTC.setUp(self) - self.planner = MSPlanner(self.o.schema, self.repo.vreg.rqlhelper) - for cached in ('rel_type_sources', 'can_cross_relation', 'is_multi_sources_relation'): - clear_cache(self.repo, cached) - - _test = test_plan - - def test_simple_system_only(self): - """retrieve entities only supported by the system source - """ - self._test('CWGroup X', - [('OneFetchStep', [('Any X WHERE X is CWGroup', [{'X': 'CWGroup'}])], - None, None, [self.system], {}, 
[])]) - - def test_simple_system_only_limit(self): - """retrieve entities only supported by the system source - """ - self._test('CWGroup X LIMIT 10', - [('OneFetchStep', [('Any X LIMIT 10 WHERE X is CWGroup', [{'X': 'CWGroup'}])], - 10, None, [self.system], {}, [])]) - - def test_simple_system_only_limit_offset(self): - """retrieve entities only supported by the system source - """ - self._test('CWGroup X LIMIT 10 OFFSET 10', - [('OneFetchStep', [('Any X LIMIT 10 OFFSET 10 WHERE X is CWGroup', [{'X': 'CWGroup'}])], - 10, 10, [self.system], {}, [])]) - - def test_simple_system_ldap(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - self._test('CWUser X', - [('OneFetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])], - None, None, [self.ldap, self.system], {}, [])]) - - def test_simple_system_ldap_limit(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - self._test('CWUser X LIMIT 10', - [('OneFetchStep', [('Any X LIMIT 10 WHERE X is CWUser', [{'X': 'CWUser'}])], - 10, None, [self.ldap, self.system], {}, [])]) - - def test_simple_system_ldap_limit_offset(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - self._test('CWUser X LIMIT 10 OFFSET 10', - [('OneFetchStep', [('Any X LIMIT 10 OFFSET 10 WHERE X is CWUser', [{'X': 'CWUser'}])], - 10, 10, [self.ldap, self.system], {}, [])]) - - def test_simple_system_ldap_ordered_limit_offset(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - self._test('CWUser X ORDERBY X LIMIT 10 OFFSET 10', - [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0\nLIMIT 10\nOFFSET 10', None, [ - ('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], {}, {'X': 'table0.C0'}, []), - ]), - ]) - def test_simple_system_ldap_aggregat(self): - """retrieve CWUser X from both sources and return concatenation of results - """ - # COUNT(X) is 
kept in sub-step and transformed into SUM(X) in the AggrStep - self._test('Any COUNT(X) WHERE X is CWUser', - [('AggrStep', 'SELECT SUM(table0.C0) FROM table0', None, [ - ('FetchStep', [('Any COUNT(X) WHERE X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], {}, {'COUNT(X)': 'table0.C0'}, []), - ]), - ]) - - def test_simple_system_rql(self): - """retrieve Card X from both sources and return concatenation of results - """ - self._test('Any X, XT WHERE X is Card, X title XT', - [('OneFetchStep', [('Any X,XT WHERE X is Card, X title XT', [{'X': 'Card', 'XT': 'String'}])], - None, None, [self.cards, self.system], {}, [])]) - - def test_simple_eid_specified(self): - """retrieve CWUser X from system source (eid is specified, can locate the entity) - """ - ueid = self.session.user.eid - self._test('Any X,L WHERE X eid %(x)s, X login L', - [('OneFetchStep', [('Any X,L WHERE X eid %s, X login L'%ueid, [{'X': 'CWUser', 'L': 'String'}])], - None, None, [self.system], {}, [])], - {'x': ueid}) - - def test_simple_eid_invariant(self): - """retrieve CWUser X from system source (eid is specified, can locate the entity) - """ - ueid = self.session.user.eid - self._test('Any X WHERE X eid %(x)s', - [('OneFetchStep', [('Any %s'%ueid, [{}])], - None, None, [self.system], {}, [])], - {'x': ueid}) - - def test_simple_invariant(self): - """retrieve CWUser X from system source only (X is invariant and in_group not supported by ldap source) - """ - self._test('Any X WHERE X is CWUser, X in_group G, G name "users"', - [('OneFetchStep', [('Any X WHERE X is CWUser, X in_group G, G name "users"', - [{'X': 'CWUser', 'G': 'CWGroup'}])], - None, None, [self.system], {}, [])]) - - def test_complex_base(self): - """ - 1. retrieve Any X, L WHERE X is CWUser, X login L from system and ldap sources, store - concatenation of results into a temporary table - 2. 
return the result of Any X, L WHERE X is TMP, X login LX in_group G, - G name 'users' on the system source - """ - self._test('Any X,L WHERE X is CWUser, X in_group G, X login L, G name "users"', - [('FetchStep', [('Any X,L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, []), - ('OneFetchStep', [('Any X,L WHERE X in_group G, X login L, G name "users", G is CWGroup, X is CWUser', - [{'X': 'CWUser', 'L': 'String', 'G': 'CWGroup'}])], - None, None, [self.system], - {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, []) - ]) - - def test_complex_base_limit_offset(self): - """ - 1. retrieve Any X, L WHERE X is CWUser, X login L from system and ldap sources, store - concatenation of results into a temporary table - 2. return the result of Any X, L WHERE X is TMP, X login LX in_group G, - G name 'users' on the system source - """ - self._test('Any X,L LIMIT 10 OFFSET 10 WHERE X is CWUser, X in_group G, X login L, G name "users"', - [('FetchStep', [('Any X,L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, []), - ('OneFetchStep', [('Any X,L LIMIT 10 OFFSET 10 WHERE X in_group G, X login L, G name "users", G is CWGroup, X is CWUser', - [{'X': 'CWUser', 'L': 'String', 'G': 'CWGroup'}])], - 10, 10, - [self.system], {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, []) - ]) - - def test_complex_ordered(self): - self._test('Any L ORDERBY L WHERE X login L', - [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0', None, - [('FetchStep', [('Any L WHERE X login L, X is CWUser', - [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []), - ]) - ]) - - def test_complex_ordered_limit_offset(self): - self._test('Any L ORDERBY L LIMIT 10 OFFSET 10 WHERE X login L', 
- [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0\nLIMIT 10\nOFFSET 10', None, - [('FetchStep', [('Any L WHERE X login L, X is CWUser', - [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []), - ]) - ]) - - def test_complex_invariant_ordered(self): - """ - 1. retrieve Any X,AA WHERE X modification_date AA from system and ldap sources, store - concatenation of results into a temporary table - 2. return the result of Any X,AA ORDERBY AA WHERE %s owned_by X, X modification_date AA - on the system source - - herrr, this is what is expected by the XXX :(, not the actual result (which is correct anyway) - """ - ueid = self.session.user.eid - self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E owned_by X, X modification_date AA', - [('FetchStep', - [('Any X,AA WHERE X modification_date AA, X is CWUser', - [{'AA': 'Datetime', 'X': 'CWUser'}])], - [self.ldap, self.system], None, - {'AA': 'table0.C1', 'X': 'table0.C0', 'X.modification_date': 'table0.C1'}, []), - ('OneFetchStep', - [('Any X,AA ORDERBY AA WHERE %s owned_by X, X modification_date AA, X is CWUser' % ueid, - [{'AA': 'Datetime', 'X': 'CWUser'}])], - None, None, [self.system], - {'AA': 'table0.C1', 'X': 'table0.C0', 'X.modification_date': 'table0.C1'}, []), - ], - {'x': ueid}) - - def test_complex_invariant(self): - """ - 1. retrieve Any X,L,AA WHERE X login L, X modification_date AA from system and ldap sources, store - concatenation of results into a temporary table - 2. 
return the result of Any X,L,AA WHERE %s owned_by X, X login L, X modification_date AA - on the system source - """ - ueid = self.session.user.eid - self._test('Any X,L,AA WHERE E eid %(x)s, E owned_by X, X login L, X modification_date AA', - [('FetchStep', [('Any X,L,AA WHERE X login L, X modification_date AA, X is CWUser', - [{'AA': 'Datetime', 'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'AA': 'table0.C2', 'X': 'table0.C0', 'X.login': 'table0.C1', 'X.modification_date': 'table0.C2', 'L': 'table0.C1'}, []), - ('OneFetchStep', [('Any X,L,AA WHERE %s owned_by X, X login L, X modification_date AA, X is CWUser'%ueid, - [{'AA': 'Datetime', 'X': 'CWUser', 'L': 'String'}])], - None, None, [self.system], - {'AA': 'table0.C2', 'X': 'table0.C0', 'X.login': 'table0.C1', 'X.modification_date': 'table0.C2', 'L': 'table0.C1'}, [])], - {'x': ueid}) - - def test_complex_ambigous(self): - """retrieve CWUser X from system and ldap sources, Person X from system source only - """ - self._test('Any X,F WHERE X firstname F', - [('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,F WHERE X firstname F, X is CWUser', - [{'X': 'CWUser', 'F': 'String'}])], - None, None, [self.ldap, self.system], {}, []), - ('OneFetchStep', [('Any X,F WHERE X firstname F, X is Personne', - [{'X': 'Personne', 'F': 'String'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_complex_ambigous_limit_offset(self): - """retrieve CWUser X from system and ldap sources, Person X from system source only - """ - self._test('Any X,F LIMIT 10 OFFSET 10 WHERE X firstname F', - [('UnionStep', 10, 10, [ - ('OneFetchStep', [('Any X,F WHERE X firstname F, X is CWUser', - [{'X': 'CWUser', 'F': 'String'}])], - None, None, - [self.ldap, self.system], {}, []), - ('OneFetchStep', [('Any X,F WHERE X firstname F, X is Personne', - [{'X': 'Personne', 'F': 'String'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_complex_ambigous_ordered(self): - """ - 1. 
retrieve CWUser X from system and ldap sources, Person X from system source only, store - each result in the same temp table - 2. return content of the table sorted - """ - self._test('Any X,F ORDERBY F WHERE X firstname F', - [('AggrStep', 'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1', None, - [('FetchStep', [('Any X,F WHERE X firstname F, X is CWUser', - [{'X': 'CWUser', 'F': 'String'}])], - [self.ldap, self.system], {}, - {'X': 'table0.C0', 'X.firstname': 'table0.C1', 'F': 'table0.C1'}, []), - ('FetchStep', [('Any X,F WHERE X firstname F, X is Personne', - [{'X': 'Personne', 'F': 'String'}])], - [self.system], {}, - {'X': 'table0.C0', 'X.firstname': 'table0.C1', 'F': 'table0.C1'}, []), - ]), - ]) - - def test_complex_multiple(self): - """ - 1. retrieve Any X,A,Y,B WHERE X login A, Y login B from system and ldap sources, store - cartesian product of results into a temporary table - 2. return the result of Any X,Y WHERE X login 'syt', Y login 'adim' - on the system source - """ - ueid = self.session.user.eid - self._test('Any X,Y WHERE X login "syt", Y login "adim"', - [('FetchStep', - [('Any X WHERE X login "syt", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, - {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any Y WHERE Y login "adim", Y is CWUser', [{'Y': 'CWUser'}])], - [self.ldap, self.system], None, - {'Y': 'table1.C0'}, []), - ('OneFetchStep', - [('Any X,Y WHERE X is CWUser, Y is CWUser', [{'X': 'CWUser', 'Y': 'CWUser'}])], - None, None, [self.system], - {'X': 'table0.C0', 'Y': 'table1.C0'}, []) - ], {'x': ueid}) - - def test_complex_multiple_limit_offset(self): - """ - 1. retrieve Any X,A,Y,B WHERE X login A, Y login B from system and ldap sources, store - cartesian product of results into a temporary table - 2. 
return the result of Any X,Y WHERE X login 'syt', Y login 'adim' - on the system source - """ - self._test('Any X,Y LIMIT 10 OFFSET 10 WHERE X login "syt", Y login "adim"', - [('FetchStep', - [('Any X WHERE X login "syt", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any Y WHERE Y login "adim", Y is CWUser', [{'Y': 'CWUser'}])], - [self.ldap, self.system], None, {'Y': 'table1.C0'}, []), - ('OneFetchStep', - [('Any X,Y LIMIT 10 OFFSET 10 WHERE X is CWUser, Y is CWUser', [{'X': 'CWUser', 'Y': 'CWUser'}])], - 10, 10, [self.system], - {'X': 'table0.C0', 'Y': 'table1.C0'}, []) - ]) - - def test_complex_aggregat(self): - self._test('Any MAX(X)', - [('OneFetchStep', - [('Any MAX(X)', X_ALL_SOLS)], - None, None, [self.system], {}, []) - ]) - - def test_complex_typed_aggregat(self): - self._test('Any MAX(X) WHERE X is Card', - [('AggrStep', 'SELECT MAX(table0.C0) FROM table0', None, - [('FetchStep', - [('Any MAX(X) WHERE X is Card', [{'X': 'Card'}])], - [self.cards, self.system], {}, {'MAX(X)': 'table0.C0'}, []) - ]) - ]) - - def test_complex_greater_eid(self): - self._test('Any X WHERE X eid > 12', - [('OneFetchStep', - [('Any X WHERE X eid > 12', X_ALL_SOLS)], - None, None, [self.system], {}, []) - ]) - - def test_complex_greater_typed_eid(self): - self._test('Any X WHERE X eid > 12, X is Card', - [('OneFetchStep', - [('Any X WHERE X eid > 12, X is Card', [{'X': 'Card'}])], - None, None, [self.system], {}, []) - ]) - - def test_complex_optional(self): - ueid = self.session.user.eid - self._test('Any U WHERE WF wf_info_for X, X eid %(x)s, WF owned_by U?, WF from_state FS', - [('OneFetchStep', [('Any U WHERE WF wf_info_for %s, WF owned_by U?, WF from_state FS' % ueid, - [{'WF': 'TrInfo', 'FS': 'State', 'U': 'CWUser'}])], - None, None, [self.system], {}, [])], - {'x': ueid}) - - def test_complex_optional(self): - ueid = self.session.user.eid - self._test('Any U WHERE WF wf_info_for X, X eid %(x)s, WF 
owned_by U?, WF from_state FS', - [('OneFetchStep', [('Any U WHERE WF wf_info_for %s, WF owned_by U?, WF from_state FS' % ueid, - [{'WF': 'TrInfo', 'FS': 'State', 'U': 'CWUser'}])], - None, None, [self.system], {}, [])], - {'x': ueid}) - - - def test_3sources_ambigous(self): - self._test('Any X,T WHERE X owned_by U, U login "syt", X title T, X is IN(Bookmark, Card, EmailThread)', - [('FetchStep', [('Any X,T WHERE X title T, X is Card', [{'X': 'Card', 'T': 'String'}])], - [self.cards, self.system], None, - {'T': 'table0.C1', 'X': 'table0.C0', 'X.title': 'table0.C1'}, []), - ('FetchStep', [('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])], - [self.ldap, self.system], None, - {'U': 'table1.C0'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,T WHERE X owned_by U, X title T, U is CWUser, X is IN(Bookmark, EmailThread)', - [{'T': 'String', 'U': 'CWUser', 'X': 'Bookmark'}, - {'T': 'String', 'U': 'CWUser', 'X': 'EmailThread'}])], - None, None, [self.system], {'U': 'table1.C0'}, []), - ('OneFetchStep', [('Any X,T WHERE X owned_by U, X title T, U is CWUser, X is Card', - [{'X': 'Card', 'U': 'CWUser', 'T': 'String'}])], - None, None, [self.system], - {'X': 'table0.C0', 'X.title': 'table0.C1', 'T': 'table0.C1', 'U': 'table1.C0'}, []), - ]), - ]) - - def test_restricted_max(self): - # dumb query to emulate the one generated by svnfile.entities.rql_revision_content - self._test('Any V, MAX(VR) WHERE V is Card, V creation_date VR, ' - '(V creation_date TODAY OR (V creation_date < TODAY AND NOT EXISTS(' - 'X is Card, X creation_date < TODAY, X creation_date >= VR)))', - [('FetchStep', [('Any VR WHERE X creation_date < TODAY, X creation_date VR, X is Card', - [{'X': 'Card', 'VR': 'Datetime'}])], - [self.cards, self.system], None, - {'VR': 'table0.C0', 'X.creation_date': 'table0.C0'}, []), - ('FetchStep', [('Any V,VR WHERE V creation_date VR, V is Card', - [{'VR': 'Datetime', 'V': 'Card'}])], - [self.cards, self.system], None, - {'VR': 'table1.C1', 'V': 
'table1.C0', 'V.creation_date': 'table1.C1'}, []), - ('OneFetchStep', [('Any V,MAX(VR) WHERE V creation_date VR, (V creation_date TODAY) OR (V creation_date < TODAY, NOT EXISTS(X creation_date >= VR, X is Card)), V is Card', - [{'X': 'Card', 'VR': 'Datetime', 'V': 'Card'}])], - None, None, [self.system], - {'VR': 'table1.C1', 'V': 'table1.C0', 'V.creation_date': 'table1.C1', 'X.creation_date': 'table0.C0'}, []) - ]) - - def test_outer_supported_rel1(self): - # both system and rql support all variables, can be - self._test('Any X, R WHERE X is Note, X in_state S, X type R, ' - 'NOT EXISTS(Y is Note, Y in_state S, Y type R, X identity Y)', - [('OneFetchStep', [('Any X,R WHERE X is Note, X in_state S, X type R, NOT EXISTS(Y is Note, Y in_state S, Y type R, X identity Y), S is State', - [{'Y': 'Note', 'X': 'Note', 'S': 'State', 'R': 'String'}])], - None, None, - [self.cards, self.system], {}, []) - ]) - - def test_not_identity(self): - ueid = self.session.user.eid - self._test('Any X WHERE NOT X identity U, U eid %s, X is CWUser' % ueid, - [('OneFetchStep', - [('Any X WHERE NOT X identity %s, X is CWUser' % ueid, [{'X': 'CWUser'}])], - None, None, - [self.ldap, self.system], {}, []) - ]) - - def test_outer_supported_rel2(self): - self._test('Any X, MAX(R) GROUPBY X WHERE X in_state S, X login R, ' - 'NOT EXISTS(Y is Note, Y in_state S, Y type R)', - [('FetchStep', [('Any A,R WHERE Y in_state A, Y type R, A is State, Y is Note', - [{'Y': 'Note', 'A': 'State', 'R': 'String'}])], - [self.cards, self.system], None, - {'A': 'table0.C0', 'R': 'table0.C1', 'Y.type': 'table0.C1'}, []), - ('FetchStep', [('Any X,R WHERE X login R, X is CWUser', [{'X': 'CWUser', 'R': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table1.C0', 'X.login': 'table1.C1', 'R': 'table1.C1'}, []), - ('OneFetchStep', [('Any X,MAX(R) GROUPBY X WHERE X in_state S, X login R, NOT EXISTS(Y type R, S identity A, A is State, Y is Note), S is State, X is CWUser', - [{'Y': 'Note', 'X': 'CWUser', 'S': 
'State', 'R': 'String', 'A': 'State'}])], - None, None, [self.system], - {'A': 'table0.C0', 'X': 'table1.C0', 'X.login': 'table1.C1', 'R': 'table1.C1', 'Y.type': 'table0.C1'}, []) - ]) - - def test_security_has_text(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X WHERE X has_text "bla"', - [('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.cards, self.system], None, {'E': 'table0.C0'}, []), - ('UnionStep', None, None, - [('OneFetchStep', - [(u'Any X WHERE X has_text "bla", (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid}, - [{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])], - None, None, [self.system], {'E': 'table0.C0'}, []), - ('OneFetchStep', - [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is IN(Basket, CWUser)' % ueid, - [{'X': 'Basket'}, {'X': 'CWUser'}]), - ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)', - [{'X': 'Card'}, {'X': 'Comment'}, - {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, - {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, - {'X': 'SubDivision'}, {'X': 'Tag'}]),], - None, None, [self.system], {}, []), - ]) - ]) - - def test_security_has_text_limit_offset(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - # note: 
same as the above query but because of the subquery usage, the - # display differs (not printing solutions for each union) - self._test('Any X LIMIT 10 OFFSET 10 WHERE X has_text "bla"', - [('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.cards, self.system], None, {'E': 'table1.C0'}, []), - ('UnionFetchStep', [ - ('FetchStep', [('Any X WHERE X has_text "bla", (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid}, - [{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])], - [self.system], {'E': 'table1.C0'}, {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is IN(Basket, CWUser)' % ueid, - [{'X': 'Basket'}, {'X': 'CWUser'}]), - ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)', - [{'X': 'Card'}, {'X': 'Comment'}, - {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, - {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, - {'X': 'SubDivision'}, {'X': 'Tag'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ]), - ('OneFetchStep', - [('Any X LIMIT 10 OFFSET 10', - [{'X': 'Affaire'}, {'X': 'Basket'}, - {'X': 'CWUser'}, {'X': 'Card'}, {'X': 'Comment'}, - {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, - {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, - {'X': 'SubDivision'}, {'X': 'Tag'}])], - 10, 10, [self.system], 
{'X': 'table0.C0'}, []) - ]) - - def test_security_user(self): - """a guest user trying to see another user: EXISTS(X owned_by U) is automatically inserted""" - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X WHERE X login "bla"', - [('FetchStep', - [('Any X WHERE X login "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('OneFetchStep', - [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0'}, [])]) - - def test_security_complex_has_text(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X WHERE X has_text "bla", X firstname "bla"', - [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0'}, []), - ('OneFetchStep', [('Any X WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_security_complex_has_text_limit_offset(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X LIMIT 10 OFFSET 10 WHERE X has_text "bla", X firstname "bla"', - [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table1.C0'}, []), - ('UnionFetchStep', [ - ('FetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])], - [self.system], {'X': 'table1.C0'}, {'X': 'table0.C0'}, []), - ('FetchStep', [('Any X WHERE X has_text "bla", X firstname "bla", X is Personne', 
[{'X': 'Personne'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ]), - ('OneFetchStep', - [('Any X LIMIT 10 OFFSET 10', [{'X': 'CWUser'}, {'X': 'Personne'}])], - 10, 10, [self.system], {'X': 'table0.C0'}, []) - ]) - - def test_security_complex_aggregat(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - ALL_SOLS = X_ALL_SOLS[:] - ALL_SOLS.remove({'X': 'CWSourceHostConfig'}) # not authorized - ALL_SOLS.remove({'X': 'CWSourceSchemaConfig'}) # not authorized - ALL_SOLS.remove({'X': 'CWDataImport'}) # not authorized - self._test('Any MAX(X)', - [('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.cards, self.system], None, {'E': 'table1.C0'}, []), - ('FetchStep', [('Any X WHERE X is IN(CWUser)', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table2.C0'}, []), - ('UnionFetchStep', [ - ('FetchStep', [('Any X WHERE EXISTS(%s use_email X), X is EmailAddress' % ueid, - [{'X': 'EmailAddress'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ('UnionFetchStep', - [('FetchStep', [('Any X WHERE X is IN(Card, Note, State)', - [{'X': 'Card'}, {'X': 'Note'}, {'X': 'State'}])], - [self.cards, self.system], {}, {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, Comment, Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, Old, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', - [{'X': 'BaseTransition'}, {'X': 'Bookmark'}, - {'X': 'CWAttribute'}, {'X': 'CWCache'}, - {'X': 'CWConstraint'}, {'X': 'CWConstraintType'}, - {'X': 'CWEType'}, {'X': 'CWGroup'}, - {'X': 'CWPermission'}, {'X': 'CWProperty'}, - {'X': 'CWRType'}, {'X': 'CWRelation'}, - {'X': 'CWSource'}, - {'X': 
'CWUniqueTogetherConstraint'}, - {'X': 'Comment'}, {'X': 'Division'}, - {'X': 'Email'}, - {'X': 'EmailPart'}, {'X': 'EmailThread'}, - {'X': 'ExternalUri'}, {'X': 'File'}, - {'X': 'Folder'}, {'X': 'Old'}, - {'X': 'Personne'}, {'X': 'RQLExpression'}, - {'X': 'Societe'}, {'X': 'SubDivision'}, - {'X': 'SubWorkflowExitPoint'}, {'X': 'Tag'}, - {'X': 'TrInfo'}, {'X': 'Transition'}, - {'X': 'Workflow'}, {'X': 'WorkflowTransition'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ]), - ('FetchStep', [('Any X WHERE (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid}, - [{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])], - [self.system], {'E': 'table1.C0'}, {'X': 'table0.C0'}, []), - ('UnionFetchStep', [ - ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is Basket' % ueid, - [{'X': 'Basket'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, - [{'X': 'CWUser'}])], - [self.system], {'X': 'table2.C0'}, {'X': 'table0.C0'}, []), - ]), - ]), - ('OneFetchStep', [('Any MAX(X)', ALL_SOLS)], - None, None, [self.system], {'X': 'table0.C0'}, []) - ]) - - def test_security_complex_aggregat2(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - X_ET_ALL_SOLS = [] - for s in X_ALL_SOLS: - if s in ({'X': 'CWSourceHostConfig'}, {'X': 'CWSourceSchemaConfig'}, {'X': 'CWDataImport'}): - continue # not authorized - ets = {'ET': 'CWEType'} - 
ets.update(s) - X_ET_ALL_SOLS.append(ets) - self._test('Any ET, COUNT(X) GROUPBY ET ORDERBY ET WHERE X is ET', - [('FetchStep', [('Any X WHERE X is IN(Card, Note, State)', - [{'X': 'Card'}, {'X': 'Note'}, {'X': 'State'}])], - [self.cards, self.system], None, {'X': 'table1.C0'}, []), - ('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.cards, self.system], None, {'E': 'table2.C0'}, []), - ('FetchStep', [('Any X WHERE X is IN(CWUser)', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table3.C0'}, []), - ('UnionFetchStep', - [('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(%s use_email X), ET is CWEType, X is EmailAddress' % ueid, - [{'ET': 'CWEType', 'X': 'EmailAddress'}]), - ], - [self.system], {}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), - # extra UnionFetchStep could be avoided but has no cost, so don't care - ('UnionFetchStep', - [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, Comment, Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, Old, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', - [{'X': 'BaseTransition', 'ET': 'CWEType'}, - {'X': 'Bookmark', 'ET': 'CWEType'}, {'X': 'CWAttribute', 'ET': 'CWEType'}, - {'X': 'CWCache', 'ET': 'CWEType'}, {'X': 'CWConstraint', 'ET': 'CWEType'}, - {'X': 'CWConstraintType', 'ET': 'CWEType'}, - {'X': 'CWEType', 'ET': 'CWEType'}, - {'X': 'CWGroup', 'ET': 'CWEType'}, {'X': 'CWPermission', 'ET': 'CWEType'}, - {'X': 'CWProperty', 'ET': 'CWEType'}, {'X': 'CWRType', 'ET': 'CWEType'}, - {'X': 'CWSource', 'ET': 'CWEType'}, - {'X': 'CWRelation', 'ET': 'CWEType'}, - {'X': 'CWUniqueTogetherConstraint', 'ET': 'CWEType'}, - {'X': 'Comment', 'ET': 'CWEType'}, - {'X': 'Division', 'ET': 'CWEType'}, {'X': 
'Email', 'ET': 'CWEType'}, - {'X': 'EmailPart', 'ET': 'CWEType'}, - {'X': 'EmailThread', 'ET': 'CWEType'}, {'X': 'ExternalUri', 'ET': 'CWEType'}, - {'X': 'File', 'ET': 'CWEType'}, {'X': 'Folder', 'ET': 'CWEType'}, - {'X': 'Old', 'ET': 'CWEType'}, {'X': 'Personne', 'ET': 'CWEType'}, - {'X': 'RQLExpression', 'ET': 'CWEType'}, {'X': 'Societe', 'ET': 'CWEType'}, - {'X': 'SubDivision', 'ET': 'CWEType'}, {'X': 'SubWorkflowExitPoint', 'ET': 'CWEType'}, - {'X': 'Tag', 'ET': 'CWEType'}, {'X': 'TrInfo', 'ET': 'CWEType'}, - {'X': 'Transition', 'ET': 'CWEType'}, {'X': 'Workflow', 'ET': 'CWEType'}, - {'X': 'WorkflowTransition', 'ET': 'CWEType'}])], - [self.system], {}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), - ('FetchStep', - [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(Card, Note, State)', - [{'ET': 'CWEType', 'X': 'Card'}, - {'ET': 'CWEType', 'X': 'Note'}, - {'ET': 'CWEType', 'X': 'State'}])], - [self.system], {'X': 'table1.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), - ]), - - ('FetchStep', [('Any ET,X WHERE X is ET, (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), ET is CWEType, X is Affaire' % {'ueid': ueid}, - [{'C': 'Division', 'E': 'Note', 'D': 'Affaire', - 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', - 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire', - 'ET': 'CWEType'}])], - [self.system], {'E': 'table2.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'}, - []), - ('UnionFetchStep', [ - ('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by %s), ET is CWEType, X is Basket' % ueid, - [{'ET': 'CWEType', 'X': 'Basket'}])], - [self.system], {}, {'ET': 'table0.C0', 'X': 
'table0.C1'}, []), - ('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by %s), ET is CWEType, X is CWUser' % ueid, - [{'ET': 'CWEType', 'X': 'CWUser'}])], - [self.system], {'X': 'table3.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), - ]), - ]), - ('OneFetchStep', - [('Any ET,COUNT(X) GROUPBY ET ORDERBY ET', X_ET_ALL_SOLS)], - None, None, [self.system], {'ET': 'table0.C0', 'X': 'table0.C1'}, []) - ]) - - def test_security_3sources(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X, XT WHERE X is Card, X owned_by U, X title XT, U login "syt"', - [('FetchStep', - [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), - ('FetchStep', - [('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])], - [self.ldap, self.system], None, {'U': 'table1.C0'}, []), - ('OneFetchStep', - [('Any X,XT WHERE X owned_by U, X title XT, EXISTS(U owned_by %s), U is CWUser, X is Card' % ueid, - [{'X': 'Card', 'U': 'CWUser', 'XT': 'String'}])], - None, None, [self.system], - {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1', 'U': 'table1.C0'}, []) - ]) - - def test_security_3sources_identity(self): - self.restore_orig_cwuser_security() - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X, XT WHERE X is Card, X owned_by U, X title XT, U login "syt"', - [('FetchStep', - [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), - ('OneFetchStep', - [('Any X,XT WHERE X owned_by U, X title XT, U login "syt", EXISTS(U identity %s), U is CWUser, X is Card' % ueid, - [{'U': 'CWUser', 'X': 'Card', 'XT': 'String'}])], - None, None, [self.system], {'X': 'table0.C0', 'X.title': 
'table0.C1', 'XT': 'table0.C1'}, []) - ]) - - def test_security_3sources_identity_optional_var(self): - self.restore_orig_cwuser_security() - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X,XT,U WHERE X is Card, X owned_by U?, X title XT, U login L', - [('FetchStep', - [('Any U,L WHERE U login L, EXISTS(U identity %s), U is CWUser' % ueid, - [{'L': 'String', u'U': 'CWUser'}])], - [self.system], {}, {'L': 'table0.C1', 'U': 'table0.C0', 'U.login': 'table0.C1'}, []), - ('FetchStep', - [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.cards, self.system], None, {'X': 'table1.C0', 'X.title': 'table1.C1', 'XT': 'table1.C1'}, []), - ('OneFetchStep', - [('Any X,XT,U WHERE X owned_by U?, X title XT, X is Card', - [{'X': 'Card', 'U': 'CWUser', 'XT': 'String'}])], - None, None, [self.system], {'L': 'table0.C1', - 'U': 'table0.C0', - 'X': 'table1.C0', - 'X.title': 'table1.C1', - 'XT': 'table1.C1'}, []) - ]) - - def test_security_3sources_limit_offset(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X, XT LIMIT 10 OFFSET 10 WHERE X is Card, X owned_by U, X title XT, U login "syt"', - [('FetchStep', - [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), - ('FetchStep', - [('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])], - [self.ldap, self.system], None, {'U': 'table1.C0'}, []), - ('OneFetchStep', - [('Any X,XT LIMIT 10 OFFSET 10 WHERE X owned_by U, X title XT, EXISTS(U owned_by %s), U is CWUser, X is Card' % ueid, - [{'X': 'Card', 'U': 'CWUser', 'XT': 'String'}])], - 10, 10, [self.system], - {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1', 'U': 'table1.C0'}, []) - ]) - - def test_exists_base(self): - self._test('Any X,L,S WHERE X in_state S, 
X login L, EXISTS(X in_group G, G name "bougloup")', - [('FetchStep', [('Any X,L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, []), - ('OneFetchStep', [("Any X,L,S WHERE X in_state S, X login L, " - 'EXISTS(X in_group G, G name "bougloup", G is CWGroup), S is State, X is CWUser', - [{'X': 'CWUser', 'L': 'String', 'S': 'State', 'G': 'CWGroup'}])], - None, None, [self.system], - {'X': 'table0.C0', 'X.login': 'table0.C1', 'L': 'table0.C1'}, [])]) - - def test_exists_complex(self): - self._test('Any G WHERE X in_group G, G name "managers", EXISTS(X copain T, T login in ("comme", "cochon"))', - [('FetchStep', [('Any T WHERE T login IN("comme", "cochon"), T is CWUser', [{'T': 'CWUser'}])], - [self.ldap, self.system], None, {'T': 'table0.C0'}, []), - ('OneFetchStep', - [('Any G WHERE X in_group G, G name "managers", EXISTS(X copain T, T is CWUser), G is CWGroup, X is CWUser', - [{'X': 'CWUser', 'T': 'CWUser', 'G': 'CWGroup'}])], - None, None, [self.system], {'T': 'table0.C0'}, [])]) - - def test_exists3(self): - self._test('Any G,L WHERE X in_group G, X login L, G name "managers", EXISTS(X copain T, T login in ("comme", "cochon"))', - [('FetchStep', - [('Any T WHERE T login IN("comme", "cochon"), T is CWUser', - [{'T': 'CWUser'}])], - [self.ldap, self.system], None, {'T': 'table0.C0'}, []), - ('FetchStep', - [('Any L,X WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table1.C1', 'X.login': 'table1.C0', 'L': 'table1.C0'}, []), - ('OneFetchStep', - [('Any G,L WHERE X in_group G, X login L, G name "managers", EXISTS(X copain T, T is CWUser), G is CWGroup, X is CWUser', - [{'G': 'CWGroup', 'L': 'String', 'T': 'CWUser', 'X': 'CWUser'}])], - None, None, - [self.system], {'T': 'table0.C0', 'X': 'table1.C1', 'X.login': 'table1.C0', 'L': 'table1.C0'}, [])]) - - def test_exists4(self): - self._test('Any G,L 
WHERE X in_group G, X login L, G name "managers", ' - 'EXISTS(X copain T, T login L, T login in ("comme", "cochon")) OR ' - 'EXISTS(X in_state S, S name "pascontent", NOT X copain T2, T2 login "billy")', - [('FetchStep', - [('Any T,L WHERE T login L, T login IN("comme", "cochon"), T is CWUser', [{'T': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'T': 'table0.C0', 'T.login': 'table0.C1', 'L': 'table0.C1'}, []), - ('FetchStep', - [('Any T2 WHERE T2 login "billy", T2 is CWUser', [{'T2': 'CWUser'}])], - [self.ldap, self.system], None, {'T2': 'table1.C0'}, []), - ('FetchStep', - [('Any L,X WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, {'X': 'table2.C1', 'X.login': 'table2.C0', 'L': 'table2.C0'}, []), - ('OneFetchStep', - [('Any G,L WHERE X in_group G, X login L, G name "managers", (EXISTS(X copain T, T login L, T is CWUser)) OR (EXISTS(X in_state S, S name "pascontent", NOT EXISTS(X copain T2), S is State)), G is CWGroup, T2 is CWUser, X is CWUser', - [{'G': 'CWGroup', 'L': 'String', 'S': 'State', 'T': 'CWUser', 'T2': 'CWUser', 'X': 'CWUser'}])], - None, None, [self.system], - {'T2': 'table1.C0', 'L': 'table2.C0', - 'T': 'table0.C0', 'T.login': 'table0.C1', 'X': 'table2.C1', 'X.login': 'table2.C0'}, [])]) - - def test_exists5(self): - self._test('Any GN,L WHERE X in_group G, X login L, G name GN, ' - 'EXISTS(X copain T, T login in ("comme", "cochon")) AND ' - 'NOT EXISTS(X copain T2, T2 login "billy")', - [('FetchStep', [('Any T WHERE T login IN("comme", "cochon"), T is CWUser', - [{'T': 'CWUser'}])], - [self.ldap, self.system], None, {'T': 'table0.C0'}, []), - ('FetchStep', [('Any T2 WHERE T2 login "billy", T2 is CWUser', [{'T2': 'CWUser'}])], - [self.ldap, self.system], None, {'T2': 'table1.C0'}, []), - ('FetchStep', [('Any L,X WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table2.C1', 'X.login': 'table2.C0', 'L': 'table2.C0'}, 
[]), - ('OneFetchStep', [('Any GN,L WHERE X in_group G, X login L, G name GN, EXISTS(X copain T, T is CWUser), NOT EXISTS(X copain T2, T2 is CWUser), G is CWGroup, X is CWUser', - [{'G': 'CWGroup', 'GN': 'String', 'L': 'String', 'T': 'CWUser', 'T2': 'CWUser', 'X': 'CWUser'}])], - None, None, [self.system], - {'T': 'table0.C0', 'T2': 'table1.C0', - 'X': 'table2.C1', 'X.login': 'table2.C0', 'L': 'table2.C0'}, [])]) - - def test_exists_security_no_invariant(self): - ueid = self.session.user.eid - self._test('Any X,AA,AB,AC,AD ORDERBY AA WHERE X is CWUser, X login AA, X firstname AB, X surname AC, X modification_date AD, A eid %(B)s, \ - EXISTS(((X identity A) OR \ - (EXISTS(X in_group C, C name IN("managers", "staff"), C is CWGroup))) OR \ - (EXISTS(X in_group D, A in_group D, NOT D name "users", D is CWGroup)))', - [('FetchStep', [('Any X,AA,AB,AC,AD WHERE X login AA, X firstname AB, X surname AC, X modification_date AD, X is CWUser', - [{'AA': 'String', 'AB': 'String', 'AC': 'String', 'AD': 'Datetime', - 'X': 'CWUser'}])], - [self.ldap, self.system], None, {'AA': 'table0.C1', 'AB': 'table0.C2', - 'AC': 'table0.C3', 'AD': 'table0.C4', - 'X': 'table0.C0', - 'X.firstname': 'table0.C2', - 'X.login': 'table0.C1', - 'X.modification_date': 'table0.C4', - 'X.surname': 'table0.C3'}, []), - ('OneFetchStep', [('Any X,AA,AB,AC,AD ORDERBY AA WHERE X login AA, X firstname AB, X surname AC, X modification_date AD, EXISTS(((X identity %(ueid)s) OR (EXISTS(X in_group C, C name IN("managers", "staff"), C is CWGroup))) OR (EXISTS(X in_group D, %(ueid)s in_group D, NOT D name "users", D is CWGroup))), X is CWUser' % {'ueid': ueid}, - [{'AA': 'String', 'AB': 'String', 'AC': 'String', 'AD': 'Datetime', - 'C': 'CWGroup', 'D': 'CWGroup', 'X': 'CWUser'}])], - None, None, [self.system], - {'AA': 'table0.C1', 'AB': 'table0.C2', 'AC': 'table0.C3', 'AD': 'table0.C4', - 'X': 'table0.C0', - 'X.firstname': 'table0.C2', 'X.login': 'table0.C1', 'X.modification_date': 'table0.C4', 'X.surname': 
'table0.C3'}, - [])], - {'B': ueid}) - - def test_relation_need_split(self): - self._test('Any X, S WHERE X in_state S', - [('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,S WHERE X in_state S, S is State, X is IN(Affaire, CWUser)', - [{'X': 'Affaire', 'S': 'State'}, {'X': 'CWUser', 'S': 'State'}])], - None, None, [self.system], {}, []), - ('OneFetchStep', [('Any X,S WHERE X in_state S, S is State, X is Note', - [{'X': 'Note', 'S': 'State'}])], - None, None, [self.cards, self.system], {}, []), - ])]) - - def test_relation_selection_need_split(self): - self._test('Any X,S,U WHERE X in_state S, X todo_by U', - [('FetchStep', [('Any X,S WHERE X in_state S, S is State, X is Note', - [{'X': 'Note', 'S': 'State'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'S': 'table0.C1'}, []), - ('UnionStep', None, None, - [('OneFetchStep', [('Any X,S,U WHERE X in_state S, X todo_by U, S is State, U is Personne, X is Affaire', - [{'X': 'Affaire', 'S': 'State', 'U': 'Personne'}])], - None, None, [self.system], {}, []), - ('OneFetchStep', [('Any X,S,U WHERE X todo_by U, S is State, U is CWUser, X is Note', - [{'X': 'Note', 'S': 'State', 'U': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0', 'S': 'table0.C1'}, []), - ]) - ]) - - def test_relation_restriction_need_split(self): - self._test('Any X,U WHERE X in_state S, S name "pending", X todo_by U', - [('FetchStep', [('Any X WHERE X in_state S, S name "pending", S is State, X is Note', - [{'X': 'Note', 'S': 'State'}])], - [self.cards, self.system], None, {'X': 'table0.C0'}, []), - ('UnionStep', None, None, - [('OneFetchStep', [('Any X,U WHERE X todo_by U, U is CWUser, X is Note', - [{'X': 'Note', 'U': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0'}, []), - ('OneFetchStep', [('Any X,U WHERE X in_state S, S name "pending", X todo_by U, S is State, U is Personne, X is Affaire', - [{'S': 'State', 'U': 'Personne', 'X': 'Affaire'}])], - None, None, [self.system], {}, []) - ]) - ]) - - def 
test_relation_restriction_ambigous_need_split(self): - self._test('Any X,T WHERE X in_state S, S name "pending", T tags X', - [('FetchStep', [('Any X WHERE X in_state S, S name "pending", S is State, X is Note', - [{'X': 'Note', 'S': 'State'}])], - [self.cards, self.system], None, {'X': 'table0.C0'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,T WHERE T tags X, T is Tag, X is Note', - [{'X': 'Note', 'T': 'Tag'}])], - None, None, - [self.system], {'X': 'table0.C0'}, []), - ('OneFetchStep', [('Any X,T WHERE X in_state S, S name "pending", T tags X, S is State, T is Tag, X is IN(Affaire, CWUser)', - [{'X': 'Affaire', 'S': 'State', 'T': 'Tag'}, - {'X': 'CWUser', 'S': 'State', 'T': 'Tag'}])], - None, None, - [self.system], {}, []), - ]) - ]) - - def test_not_relation_no_split_internal(self): - ueid = self.session.user.eid - # NOT on a relation supported by rql and system source: we want to get - # all states (eg from both sources) which are not related to entity with the - # given eid. The "NOT X in_state S, X eid %(x)s" expression is necessarily true - # in the source where %(x)s is not coming from and will be removed during rql - # generation for the external source - self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN', - [('OneFetchStep', [('Any SN WHERE NOT EXISTS(%s in_state S), S name SN, S is State' % ueid, - [{'S': 'State', 'SN': 'String'}])], - None, None, [self.cards, self.system], {}, [])], - {'x': ueid}) - - def test_not_relation_no_split_external(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - # similar to the above test but with an eid coming from the external source. 
- # the same plan may be used, since we won't find any record in the system source - # linking 9999999 to a state - self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN', - [('OneFetchStep', [('Any SN WHERE NOT EXISTS(999999 in_state S), S name SN, S is State', - [{'S': 'State', 'SN': 'String'}])], - None, None, [self.cards, self.system], {}, [])], - {'x': 999999}) - - def test_not_relation_need_split(self): - self._test('Any SN WHERE NOT X in_state S, S name SN', - [('FetchStep', [('Any SN,S WHERE S name SN, S is State', - [{'S': 'State', 'SN': 'String'}])], - [self.cards, self.system], None, {'S': 'table0.C1', 'S.name': 'table0.C0', 'SN': 'table0.C0'}, - []), - ('IntersectStep', None, None, - [('OneFetchStep', - [('Any SN WHERE NOT EXISTS(X in_state S, X is Note), S name SN, S is State', - [{'S': 'State', 'SN': 'String', 'X': 'Note'}])], - None, None, [self.cards, self.system], {}, - []), - ('OneFetchStep', - [('Any SN WHERE NOT EXISTS(X in_state S, X is IN(Affaire, CWUser)), S name SN, S is State', - [{'S': 'State', 'SN': 'String', 'X': 'Affaire'}, - {'S': 'State', 'SN': 'String', 'X': 'CWUser'}])], - None, None, [self.system], {'S': 'table0.C1', 'S.name': 'table0.C0', 'SN': 'table0.C0'}, - []),] - )]) - - def test_external_attributes_and_relation(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any A,B,C,D WHERE A eid %(x)s,A creation_date B,A modification_date C, A todo_by D?', - [('FetchStep', [('Any A,B,C WHERE A eid 999999, A creation_date B, A modification_date C, A is Note', - [{'A': 'Note', 'C': 'Datetime', 'B': 'Datetime'}])], - [self.cards], None, - {'A': 'table0.C0', 'A.creation_date': 'table0.C1', 'A.modification_date': 'table0.C2', 'C': 'table0.C2', 'B': 'table0.C1'}, []), - #('FetchStep', [('Any D WHERE D is CWUser', [{'D': 'CWUser'}])], - # [self.ldap, self.system], None, {'D': 'table1.C0'}, []), - ('OneFetchStep', [('Any A,B,C,D WHERE A creation_date B, A modification_date C, A todo_by D?, A 
is Note, D is CWUser', - [{'A': 'Note', 'C': 'Datetime', 'B': 'Datetime', 'D': 'CWUser'}])], - None, None, [self.system], - {'A': 'table0.C0', 'A.creation_date': 'table0.C1', 'A.modification_date': 'table0.C2', 'C': 'table0.C2', 'B': 'table0.C1'}, [])], - {'x': 999999}) - - - def test_simplified_var_1(self): - ueid = self.session.user.eid - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - # need access to cards source since X table has to be accessed because of the outer join - self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR ' - '(X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s', - [('FetchStep', - [('Any 999999', [{}])], [self.cards], - None, {u'%(x)s': 'table0.C0'}, []), - ('OneFetchStep', - [(u'Any 6 WHERE 6 in_group G, (G name IN("managers", "logilab")) OR ' - '(X require_permission P?, P name "bla", P require_group G), ' - 'G is CWGroup, P is CWPermission, X is Note', - [{'G': 'CWGroup', 'P': 'CWPermission', 'X': 'Note'}])], - None, None, [self.system], {u'%(x)s': 'table0.C0'}, [])], - {'x': 999999, 'u': ueid}) - - def test_simplified_var_2(self): - ueid = self.session.user.eid - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - # no need access to source since X is invariant - self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR ' - '(X require_permission P, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s', - [('OneFetchStep', [('Any %s WHERE %s in_group G, (G name IN("managers", "logilab")) OR (999999 require_permission P, P name "bla", P require_group G)' % (ueid, ueid), - [{'G': 'CWGroup', 'P': 'CWPermission'}])], - None, None, [self.system], {}, [])], - {'x': 999999, 'u': ueid}) - - def test_has_text(self): - self._test('Card X WHERE X has_text "toto"', - [('OneFetchStep', [('Any X WHERE X has_text "toto", X is Card', - [{'X': 'Card'}])], - None, None, [self.system], {}, [])]) - - def test_has_text_3(self): - 
self._test('Any X WHERE X has_text "toto", X title "zoubidou", X is IN (Card, EmailThread)', - [('FetchStep', [(u'Any X WHERE X title "zoubidou", X is Card', - [{'X': 'Card'}])], - [self.cards, self.system], None, {'X': 'table0.C0'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [(u'Any X WHERE X has_text "toto", X is Card', - [{'X': 'Card'}])], - None, None, [self.system], {'X': 'table0.C0'}, []), - ('OneFetchStep', [(u'Any X WHERE X has_text "toto", X title "zoubidou", X is EmailThread', - [{'X': 'EmailThread'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_has_text_orderby_rank(self): - self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', - [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('AggrStep', 'SELECT table1.C1 FROM table1\nORDER BY table1.C0', None, [ - ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X is CWUser', - [{'X': 'CWUser'}])], - [self.system], {'X': 'table0.C0'}, {'FTIRANK(X)': 'table1.C0', 'X': 'table1.C1'}, []), - ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X firstname "bla", X is Personne', - [{'X': 'Personne'}])], - [self.system], {}, {'FTIRANK(X)': 'table1.C0', 'X': 'table1.C1'}, []), - ]), - ]) - - def test_security_has_text_orderby_rank(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', - [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table1.C0'}, []), - ('UnionFetchStep', - [('FetchStep', [('Any X WHERE X firstname "bla", X is Personne', [{'X': 'Personne'}])], - [self.system], {}, {'X': 'table0.C0'}, []), - ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])], - [self.system], {'X': 'table1.C0'}, {'X': 
'table0.C0'}, [])]), - ('OneFetchStep', [('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla"', - [{'X': 'CWUser'}, {'X': 'Personne'}])], - None, None, [self.system], {'X': 'table0.C0'}, []), - ]) - - def test_has_text_select_rank(self): - self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', - # XXX unecessary duplicate selection - [('FetchStep', [('Any X,X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C1'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X is CWUser', [{'X': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C1'}, []), - ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_security_has_text_select_rank(self): - # use a guest user - self.session = self.user_groups_session('guests') - ueid = self.session.user.eid - self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', - [('FetchStep', [('Any X,X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C1'}, []), - ('UnionStep', None, None, [ - ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C1'}, []), - ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], - None, None, [self.system], {}, []), - ]), - ]) - - def test_sort_func(self): - self._test('Note X ORDERBY DUMB_SORT(RF) WHERE X type RF', - [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY DUMB_SORT(table0.C1)', None, [ - ('FetchStep', [('Any X,RF WHERE X type RF, X is Note', - [{'X': 'Note', 'RF': 'String'}])], - [self.cards, self.system], {}, {'X': 'table0.C0', 'X.type': 'table0.C1', 'RF': 'table0.C1'}, []), - ]) - ]) - - def 
test_ambigous_sort_func(self): - self._test('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF, X is IN (Bookmark, Card, EmailThread)', - [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY DUMB_SORT(table0.C1)', None, - [('FetchStep', [('Any X,RF WHERE X title RF, X is Card', - [{'X': 'Card', 'RF': 'String'}])], - [self.cards, self.system], {}, - {'X': 'table0.C0', 'X.title': 'table0.C1', 'RF': 'table0.C1'}, []), - ('FetchStep', [('Any X,RF WHERE X title RF, X is IN(Bookmark, EmailThread)', - [{'RF': 'String', 'X': 'Bookmark'}, - {'RF': 'String', 'X': 'EmailThread'}])], - [self.system], {}, - {'X': 'table0.C0', 'X.title': 'table0.C1', 'RF': 'table0.C1'}, []), - ]), - ]) - - def test_attr_unification_1(self): - self._test('Any X,Y WHERE X is Bookmark, Y is Card, X title T, Y title T', - [('FetchStep', - [('Any Y,T WHERE Y title T, Y is Card', [{'T': 'String', 'Y': 'Card'}])], - [self.cards, self.system], None, - {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.title': 'table0.C1'}, []), - ('OneFetchStep', - [('Any X,Y WHERE X title T, Y title T, X is Bookmark, Y is Card', - [{'T': 'String', 'X': 'Bookmark', 'Y': 'Card'}])], - None, None, [self.system], - {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.title': 'table0.C1'}, []) - ]) - - def test_attr_unification_2(self): - self._test('Any X,Y WHERE X is Note, Y is Card, X type T, Y title T', - [('FetchStep', - [('Any X,T WHERE X type T, X is Note', [{'T': 'String', 'X': 'Note'}])], - [self.cards, self.system], None, - {'T': 'table0.C1', 'X': 'table0.C0', 'X.type': 'table0.C1'}, []), - ('FetchStep', - [('Any Y,T WHERE Y title T, Y is Card', [{'T': 'String', 'Y': 'Card'}])], - [self.cards, self.system], None, - {'T': 'table1.C1', 'Y': 'table1.C0', 'Y.title': 'table1.C1'}, []), - ('OneFetchStep', - [('Any X,Y WHERE X type T, Y title T, X is Note, Y is Card', - [{'T': 'String', 'X': 'Note', 'Y': 'Card'}])], - None, None, [self.system], - {'T': 'table1.C1', - 'X': 'table0.C0', 'X.type': 'table0.C1', - 'Y': 'table1.C0', 'Y.title': 
'table1.C1'}, []) - ]) - - def test_attr_unification_neq_1(self): - self._test('Any X,Y WHERE X is Bookmark, Y is Card, X creation_date D, Y creation_date > D', - [('FetchStep', - [('Any Y,D WHERE Y creation_date D, Y is Card', - [{'D': 'Datetime', 'Y': 'Card'}])], - [self.cards,self.system], None, - {'D': 'table0.C1', 'Y': 'table0.C0', 'Y.creation_date': 'table0.C1'}, []), - ('OneFetchStep', - [('Any X,Y WHERE X creation_date D, Y creation_date > D, X is Bookmark, Y is Card', - [{'D': 'Datetime', 'X': 'Bookmark', 'Y': 'Card'}])], None, None, - [self.system], - {'D': 'table0.C1', 'Y': 'table0.C0', 'Y.creation_date': 'table0.C1'}, []) - ]) - - def test_subquery_1(self): - ueid = self.session.user.eid - self._test('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by D), D eid %(E)s ' - 'WITH A,N BEING ((Any X,N WHERE X is Tag, X name N) UNION (Any X,T WHERE X is Bookmark, X title T))', - [('FetchStep', [('Any X,N WHERE X is Tag, X name N', [{'N': 'String', 'X': 'Tag'}]), - ('Any X,T WHERE X is Bookmark, X title T', - [{'T': 'String', 'X': 'Bookmark'}])], - [self.system], {}, {'N': 'table0.C1', 'X': 'table0.C0', 'X.name': 'table0.C1'}, []), - ('FetchStep', - [('Any B,C WHERE B login C, B is CWUser', [{'B': 'CWUser', 'C': 'String'}])], - [self.ldap, self.system], None, {'B': 'table1.C0', 'B.login': 'table1.C1', 'C': 'table1.C1'}, []), - ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by %s), B is CWUser, A is IN(Bookmark, Tag)' % ueid, - [{'A': 'Bookmark', 'B': 'CWUser', 'C': 'String'}, - {'A': 'Tag', 'B': 'CWUser', 'C': 'String'}])], - None, None, [self.system], - {'A': 'table0.C0', - 'B': 'table1.C0', 'B.login': 'table1.C1', - 'C': 'table1.C1', - 'N': 'table0.C1'}, - [])], - {'E': ueid}) - - def test_subquery_2(self): - ueid = self.session.user.eid - self._test('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by D), D eid %(E)s ' - 'WITH A,N BEING ((Any X,N WHERE X is 
Tag, X name N) UNION (Any X,T WHERE X is Card, X title T))', - [('UnionFetchStep', - [('FetchStep', [('Any X,N WHERE X is Tag, X name N', [{'N': 'String', 'X': 'Tag'}])], - [self.system], {}, - {'N': 'table0.C1', - 'T': 'table0.C1', - 'X': 'table0.C0', - 'X.name': 'table0.C1', - 'X.title': 'table0.C1'}, []), - ('FetchStep', [('Any X,T WHERE X is Card, X title T', - [{'T': 'String', 'X': 'Card'}])], - [self.cards, self.system], {}, - {'N': 'table0.C1', - 'T': 'table0.C1', - 'X': 'table0.C0', - 'X.name': 'table0.C1', - 'X.title': 'table0.C1'}, []), - ]), - ('FetchStep', - [('Any B,C WHERE B login C, B is CWUser', [{'B': 'CWUser', 'C': 'String'}])], - [self.ldap, self.system], None, {'B': 'table1.C0', 'B.login': 'table1.C1', 'C': 'table1.C1'}, []), - ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by %s), B is CWUser, A is IN(Card, Tag)' % ueid, - [{'A': 'Card', 'B': 'CWUser', 'C': 'String'}, - {'A': 'Tag', 'B': 'CWUser', 'C': 'String'}])], - None, None, [self.system], - {'A': 'table0.C0', - 'B': 'table1.C0', 'B.login': 'table1.C1', - 'C': 'table1.C1', - 'N': 'table0.C1'}, - [])], - {'E': ueid}) - - def test_eid_dont_cross_relation_1(self): - repo._type_source_cache[999999] = ('Personne', 'system', 999999, 'system') - self._test('Any Y,YT WHERE X eid %(x)s, X fiche Y, Y title YT', - [('OneFetchStep', [('Any Y,YT WHERE X eid 999999, X fiche Y, Y title YT', - [{'X': 'Personne', 'Y': 'Card', 'YT': 'String'}])], - None, None, [self.system], {}, [])], - {'x': 999999}) - - def test_eid_dont_cross_relation_2(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.cards.dont_cross_relations.add('concerne') - try: - self._test('Any Y,S,YT,X WHERE Y concerne X, Y in_state S, X eid 999999, Y ref YT', - [('OneFetchStep', [('Any Y,S,YT,999999 WHERE Y concerne 999999, Y in_state S, Y ref YT', - [{'Y': 'Affaire', 'YT': 'String', 'S': 'State'}])], - None, None, [self.system], {}, [])], - {'x': 999999}) - 
finally: - self.cards.dont_cross_relations.remove('concerne') - - - # external source w/ .cross_relations == ['multisource_crossed_rel'] ###### - - def test_crossed_relation_eid_1_invariant(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y', - [('OneFetchStep', [('Any Y WHERE 999999 multisource_crossed_rel Y', [{u'Y': 'Note'}])], - None, None, [self.system], {}, []) - ], - {'x': 999999,}) - - def test_crossed_relation_eid_1_needattr(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T', - [('FetchStep', [('Any Y,T WHERE Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])], - [self.cards, self.system], None, - {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.type': 'table0.C1'}, []), - ('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note', - [{'T': 'String', 'Y': 'Note'}])], - None, None, [self.system], - {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.type': 'table0.C1'}, []), - ], - {'x': 999999,}) - - def test_crossed_relation_eid_2_invariant(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y', - [('OneFetchStep', [('Any Y WHERE 999999 multisource_crossed_rel Y, Y is Note', [{'Y': 'Note'}])], - None, None, [self.cards, self.system], {}, []) - ], - {'x': 999999,}) - - def test_crossed_relation_eid_2_needattr(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T', - [('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note', - [{'T': 'String', 'Y': 'Note'}])], - None, None, [self.cards, self.system], {}, - []), - ], - {'x': 999999,}) - - def test_crossed_relation_eid_not_1(self): - repo._type_source_cache[999999] = 
('Note', 'system', 999999, 'system') - self._test('Any Y WHERE X eid %(x)s, NOT X multisource_crossed_rel Y', - [('FetchStep', [('Any Y WHERE Y is Note', [{'Y': 'Note'}])], - [self.cards, self.system], None, {'Y': 'table0.C0'}, []), - ('OneFetchStep', [('Any Y WHERE NOT EXISTS(999999 multisource_crossed_rel Y), Y is Note', - [{'Y': 'Note'}])], - None, None, [self.system], - {'Y': 'table0.C0'}, [])], - {'x': 999999,}) - -# def test_crossed_relation_eid_not_2(self): -# repo._type_source_cache[999999] = ('Note', 'cards', 999999) -# self._test('Any Y WHERE X eid %(x)s, NOT X multisource_crossed_rel Y', -# [], -# {'x': 999999,}) - - def test_crossed_relation_base_XXXFIXME(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T', - [('FetchStep', [('Any X,T WHERE X type T, X is Note', [{'T': 'String', 'X': 'Note'}])], - [self.cards, self.system], None, - {'T': 'table0.C1', 'X': 'table0.C0', 'X.type': 'table0.C1'}, []), - ('FetchStep', [('Any Y,T WHERE Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])], - [self.cards, self.system], None, - {'T': 'table1.C1', 'Y': 'table1.C0', 'Y.type': 'table1.C1'}, []), - ('FetchStep', [('Any X,Y WHERE X multisource_crossed_rel Y, X is Note, Y is Note', - [{'X': 'Note', 'Y': 'Note'}])], - [self.cards, self.system], None, - {'X': 'table2.C0', 'Y': 'table2.C1'}, - []), - ('OneFetchStep', [('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T, ' - 'X is Note, Y is Note, Y identity A, X identity B, A is Note, B is Note', - [{u'A': 'Note', u'B': 'Note', 'T': 'String', 'X': 'Note', 'Y': 'Note'}])], - None, None, - [self.system], - {'A': 'table1.C0', - 'B': 'table0.C0', - 'T': 'table1.C1', - 'X': 'table2.C0', - 'X.type': 'table0.C1', - 'Y': 'table2.C1', - 'Y.type': 'table1.C1'}, - []), - ], - {'x': 999999,}) - - def test_crossed_relation_noeid_needattr(self): - # http://www.cubicweb.org/ticket/1382452 - self._test('DISTINCT Any 
DEP WHERE DEP is Note, P type "cubicweb-foo", P multisource_crossed_rel DEP, DEP type LIKE "cubicweb%"', - [('FetchStep', [(u'Any DEP WHERE DEP type LIKE "cubicweb%", DEP is Note', - [{'DEP': 'Note'}])], - [self.cards, self.system], None, - {'DEP': 'table0.C0'}, - []), - ('FetchStep', [(u'Any P WHERE P type "cubicweb-foo", P is Note', [{'P': 'Note'}])], - [self.cards, self.system], None, {'P': 'table1.C0'}, - []), - ('FetchStep', [('Any DEP,P WHERE P multisource_crossed_rel DEP, DEP is Note, P is Note', - [{'DEP': 'Note', 'P': 'Note'}])], - [self.cards, self.system], None, {'DEP': 'table2.C0', 'P': 'table2.C1'}, - []), - ('OneFetchStep', - [('DISTINCT Any DEP WHERE P multisource_crossed_rel DEP, DEP is Note, ' - 'P is Note, DEP identity A, P identity B, A is Note, B is Note', - [{u'A': 'Note', u'B': 'Note', 'DEP': 'Note', 'P': 'Note'}])], - None, None, [self.system], - {'A': 'table0.C0', 'B': 'table1.C0', 'DEP': 'table2.C0', 'P': 'table2.C1'}, - [])]) - - def test_crossed_relation_noeid_invariant(self): - # see comment in http://www.cubicweb.org/ticket/1382452 - self.schema.add_relation_def( - RelationDefinition(subject='Note', name='multisource_crossed_rel', object='Affaire')) - self.repo.set_schema(self.schema) - try: - self._test('DISTINCT Any P,DEP WHERE P type "cubicweb-foo", P multisource_crossed_rel DEP', - [('FetchStep', - [('Any DEP WHERE DEP is Note', [{'DEP': 'Note'}])], - [self.cards, self.system], None, {'DEP': 'table0.C0'}, []), - ('FetchStep', - [(u'Any P WHERE P type "cubicweb-foo", P is Note', [{'P': 'Note'}])], - [self.cards, self.system], None, {'P': 'table1.C0'}, []), - ('UnionStep', None, None, - [('OneFetchStep', - [('DISTINCT Any P,DEP WHERE P multisource_crossed_rel DEP, DEP is Note, P is Note', - [{'DEP': 'Note', 'P': 'Note'}])], - None, None, [self.cards], None, []), - ('OneFetchStep', - [('DISTINCT Any P,DEP WHERE P multisource_crossed_rel DEP, DEP is Note, P is Note', - [{'DEP': 'Note', 'P': 'Note'}])], - None, None, [self.system], - 
{'DEP': 'table0.C0', 'P': 'table1.C0'}, - []), - ('OneFetchStep', - [('DISTINCT Any P,DEP WHERE P multisource_crossed_rel DEP, DEP is Affaire, P is Note', - [{'DEP': 'Affaire', 'P': 'Note'}])], - None, None, [self.system], {'P': 'table1.C0'}, - [])]) - ]) - finally: - self.schema.del_relation_def('Note', 'multisource_crossed_rel', 'Affaire') - self.repo.set_schema(self.schema) - - # edition queries tests ################################################### - - def test_insert_simplified_var_1(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'system', None, 'system') - self._test('INSERT Note X: X in_state S, X type T WHERE S eid %(s)s, N eid %(n)s, N type T', - [('InsertStep', - [('InsertRelationsStep', - [('OneFetchStep', [('Any T WHERE N eid 999999, N type T, N is Note', - [{'N': 'Note', 'T': 'String'}])], - None, None, [self.cards], {}, [])]) - ]) - ], - {'n': 999999, 's': 999998}) - - def test_insert_simplified_var_2(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'system', None, 'system') - self._test('INSERT Note X: X in_state S, X type T, X migrated_from N WHERE S eid %(s)s, N eid %(n)s, N type T', - [('InsertStep', - [('InsertRelationsStep', - [('OneFetchStep', [('Any T WHERE N eid 999999, N type T, N is Note', - [{'N': 'Note', 'T': 'String'}])], - None, None, [self.cards], {}, []) - ]) - ]) - ], - {'n': 999999, 's': 999998}) - - def test_insert_simplified_var_3(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'cards', 999998, 'cards') - self._test('INSERT Note X: X in_state S, X type T WHERE S eid %(s)s, N eid %(n)s, N type T', - [('InsertStep', - [('InsertRelationsStep', - [('OneFetchStep', [('Any T WHERE N eid 999999, N type T, N is Note', - [{'N': 'Note', 'T': 'String'}])], - None, None, [self.cards], {}, [])] - )] - )], - {'n': 
999999, 's': 999998}) - - def test_insert_simplified_var_4(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'system', None, 'system') - self._test('INSERT Note X: X in_state S, X type "bla", X migrated_from N WHERE S eid %(s)s, N eid %(n)s', - [('InsertStep', - [('InsertRelationsStep', - [('OneFetchStep', - [('Any 999999', [{}])], - None, None, - [self.system], {}, - [])])] - )], - {'n': 999999, 's': 999998}) - - def test_insert_simplified_var_5(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('State', 'system', None, 'system') - self._test('INSERT Note X: X in_state S, X type "bla", X migrated_from N WHERE S eid %(s)s, N eid %(n)s, A concerne N', - [('InsertStep', - [('InsertRelationsStep', - [('OneFetchStep', - [('Any A WHERE A concerne 999999, A is Affaire', - [{'A': 'Affaire'}])], - None, None, [self.system], {}, []), - ]), - ]) - ], - {'n': 999999, 's': 999998}) - - def test_delete_relation1(self): - ueid = self.session.user.eid - self._test('DELETE X created_by Y WHERE X eid %(x)s, NOT Y eid %(y)s', - [('DeleteRelationsStep', [ - ('OneFetchStep', [('Any %s,Y WHERE %s created_by Y, NOT Y eid %s, Y is CWUser' % (ueid, ueid, ueid), - [{'Y': 'CWUser'}])], - None, None, [self.system], {}, []), - ]), - ], - {'x': ueid, 'y': ueid}) - - def test_delete_relation2(self): - ueid = self.session.user.eid - self._test('DELETE X created_by Y WHERE X eid %(x)s, NOT Y login "syt"', - [('FetchStep', [('Any Y WHERE NOT Y login "syt", Y is CWUser', [{'Y': 'CWUser'}])], - [self.ldap, self.system], None, {'Y': 'table0.C0'}, []), - ('DeleteRelationsStep', [ - ('OneFetchStep', [('Any %s,Y WHERE %s created_by Y, Y is CWUser'%(ueid,ueid), [{'Y': 'CWUser'}])], - None, None, [self.system], {'Y': 'table0.C0'}, []), - ]), - ], - {'x': ueid, 'y': ueid}) - - def test_delete_relation3(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 
'cards') - self.assertRaises( - BadRQLQuery, self._test, - 'DELETE Y multisource_inlined_rel X WHERE X eid %(x)s, ' - 'NOT (Y cw_source S, S name %(source)s)', [], - {'x': 999999, 'source': 'cards'}) - - def test_delete_relation4(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.assertRaises( - BadRQLQuery, self._test, - 'DELETE X multisource_inlined_rel Y WHERE Y is Note, X eid %(x)s, ' - 'NOT (Y cw_source S, S name %(source)s)', [], - {'x': 999999, 'source': 'cards'}) - - def test_delete_entity1(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('DELETE Note X WHERE X eid %(x)s, NOT Y multisource_rel X', - [('DeleteEntitiesStep', - [('OneFetchStep', [('Any 999999 WHERE NOT EXISTS(Y multisource_rel 999999), Y is IN(Card, Note)', - [{'Y': 'Card'}, {'Y': 'Note'}])], - None, None, [self.system], {}, []) - ]) - ], - {'x': 999999}) - - def test_delete_entity2(self): - repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('DELETE Note X WHERE X eid %(x)s, NOT X multisource_inlined_rel Y', - [('DeleteEntitiesStep', - [('OneFetchStep', [('Any X WHERE X eid 999999, NOT X multisource_inlined_rel Y, X is Note, Y is IN(Affaire, Note)', - [{'X': 'Note', 'Y': 'Affaire'}, {'X': 'Note', 'Y': 'Note'}])], - None, None, [self.system], {}, []) - ]) - ], - {'x': 999999}) - - def test_update(self): - self._test('SET X copain Y WHERE X login "comme", Y login "cochon"', - [('FetchStep', - [('Any X WHERE X login "comme", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any Y WHERE Y login "cochon", Y is CWUser', [{'Y': 'CWUser'}])], - [self.ldap, self.system], None, {'Y': 'table1.C0'}, []), - ('UpdateStep', - [('OneFetchStep', - [('DISTINCT Any X,Y WHERE X is CWUser, Y is CWUser', - [{'X': 'CWUser', 'Y': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0', 'Y': 'table1.C0'}, []) - ]) - ]) - - def 
test_update2(self): - self._test('SET U in_group G WHERE G name ~= "bougloup%", U login "admin"', - [('FetchStep', [('Any U WHERE U login "admin", U is CWUser', [{'U': 'CWUser'}])], - [self.ldap, self.system], None, {'U': 'table0.C0'}, []), - ('UpdateStep', [ - ('OneFetchStep', [('DISTINCT Any U,G WHERE G name ILIKE "bougloup%", G is CWGroup, U is CWUser', - [{'U': 'CWUser', 'G': 'CWGroup'}])], - None, None, [self.system], {'U': 'table0.C0'}, []), - ]), - ]) - - def test_update3(self): - anoneid = self.user_groups_session('guests').user.eid - # since we are adding a in_state relation for an entity in the system - # source, states should only be searched in the system source as well - self._test('SET X in_state S WHERE X eid %(x)s, S name "deactivated"', - [('UpdateStep', [ - ('OneFetchStep', [('DISTINCT Any S WHERE S name "deactivated", S is State', - [{'S': 'State'}])], - None, None, [self.system], {}, []), - ]), - ], - {'x': anoneid}) - -# def test_update4(self): -# # since we are adding a in_state relation with a state from the system -# # source, CWUser should only be searched only in the system source as well -# rset = self.execute('State X WHERE X name "activated"') -# assert len(rset) == 1, rset -# activatedeid = rset[0][0] -# self._test('SET X in_state S WHERE X is CWUser, S eid %s' % activatedeid, -# [('UpdateStep', [ -# ('OneFetchStep', [('DISTINCT Any X,%s WHERE X is CWUser' % activatedeid, -# [{'X': 'CWUser'}])], -# None, None, [self.system], {}, []), -# ]), -# ]) - - def test_ldap_user_related_to_invariant_and_dont_cross_rel(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.cards.dont_cross_relations.add('created_by') - try: - self._test('Any X,XL WHERE E eid %(x)s, E created_by X, X login XL', - [('FetchStep', [('Any X,XL WHERE X login XL, X is CWUser', - [{'X': 'CWUser', 'XL': 'String'}])], - [self.ldap, self.system], None, - {'X': 'table0.C0', 'X.login': 'table0.C1', 'XL': 'table0.C1'}, - []), - 
('OneFetchStep', - [('Any X,XL WHERE 999999 created_by X, X login XL, X is CWUser', - [{'X': 'CWUser', 'XL': 'String'}])], - None, None, - [self.system], - {'X': 'table0.C0', 'X.login': 'table0.C1', 'XL': 'table0.C1'}, - [])], - {'x': 999999}) - finally: - self.cards.dont_cross_relations.remove('created_by') - - def test_ambigous_cross_relation(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.cards.support_relations['see_also'] = True - self.cards.cross_relations.add('see_also') - try: - self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E see_also X, X modification_date AA', - [('AggrStep', - 'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1', - None, - [('FetchStep', - [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Note', - [{'AA': 'Datetime', 'X': 'Note'}])], [self.cards, self.system], {}, - {'AA': 'table0.C1', 'X': 'table0.C0', - 'X.modification_date': 'table0.C1'}, - []), - ('FetchStep', - [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Bookmark', - [{'AA': 'Datetime', 'X': 'Bookmark'}])], - [self.system], {}, - {'AA': 'table0.C1', 'X': 'table0.C0', - 'X.modification_date': 'table0.C1'}, - [])])], - {'x': 999999}) - finally: - del self.cards.support_relations['see_also'] - self.cards.cross_relations.remove('see_also') - - def test_state_of_cross(self): - self._test('DELETE State X WHERE NOT X state_of Y', - [('DeleteEntitiesStep', - [('OneFetchStep', - [('Any X WHERE NOT X state_of Y, X is State, Y is Workflow', - [{'X': 'State', 'Y': 'Workflow'}])], - None, None, [self.system], {}, [])])] - ) - - - def test_source_specified_0_0(self): - self._test('Card X WHERE X cw_source S, S eid 1', - [('OneFetchStep', [('Any X WHERE X cw_source 1, X is Card', - [{'X': 'Card'}])], - None, None, - [self.system],{}, []) - ]) - - def test_source_specified_0_1(self): - self._test('Any X, S WHERE X is Card, X cw_source S, S eid 1', - [('OneFetchStep', [('Any X,1 WHERE X is Card, X cw_source 
1', - [{'X': 'Card'}])], - None, None, - [self.system],{}, []) - ]) - - def test_source_specified_1_0(self): - self._test('Card X WHERE X cw_source S, S name "system"', - [('OneFetchStep', [('Any X WHERE X cw_source S, S name "system", X is Card', - [{'X': 'Card', 'S': 'CWSource'}])], - None, None, - [self.system],{}, []) - ]) - - def test_source_specified_1_1(self): - self._test('Any X, SN WHERE X is Card, X cw_source S, S name "system", S name SN', - [('OneFetchStep', [('Any X,SN WHERE X is Card, X cw_source S, S name "system", ' - 'S name SN', - [{'S': 'CWSource', 'SN': 'String', 'X': 'Card'}])], - None, None, [self.system], {}, []) - ]) - - def test_source_specified_1_2(self): - self._test('Card X WHERE X cw_source S, S name "datafeed"', - [('OneFetchStep', [('Any X WHERE X cw_source S, S name "datafeed", X is Card', - [{'X': 'Card', 'S': 'CWSource'}])], - None, None, - [self.system],{}, []) - ]) - - def test_source_specified_1_3(self): - self._test('Any X, SN WHERE X is Card, X cw_source S, S name "datafeed", S name SN', - [('OneFetchStep', [('Any X,SN WHERE X is Card, X cw_source S, S name "datafeed", ' - 'S name SN', - [{'S': 'CWSource', 'SN': 'String', 'X': 'Card'}])], - None, None, [self.system], {}, []) - ]) - - def test_source_specified_1_4(self): - sols = [] - for sol in X_ALL_SOLS: - sol = sol.copy() - sol['S'] = 'CWSource' - sols.append(sol) - self._test('Any X WHERE X cw_source S, S name "cards"', - [('OneFetchStep', [('Any X WHERE X cw_source S, S name "cards"', - sols)], - None, None, - [self.system],{}, []) - ]) - - def test_source_specified_2_0(self): - # self._test('Card X WHERE X cw_source S, NOT S eid 1', - # [('OneFetchStep', [('Any X WHERE X is Card', - # [{'X': 'Card'}])], - # None, None, - # [self.cards],{}, []) - # ]) - self._test('Card X WHERE NOT X cw_source S, S eid 1', - [('OneFetchStep', [('Any X WHERE X is Card', - [{'X': 'Card'}])], - None, None, - [self.cards],{}, []) - ]) - - def test_source_specified_2_1(self): - 
self._test('Card X WHERE X cw_source S, NOT S name "system"', - [('OneFetchStep', [('Any X WHERE X is Card', - [{'X': 'Card'}])], - None, None, - [self.cards],{}, []) - ]) - self._test('Card X WHERE NOT X cw_source S, S name "system"', - [('OneFetchStep', [('Any X WHERE X is Card', - [{'X': 'Card'}])], - None, None, - [self.cards],{}, []) - ]) - - def test_source_specified_3_1(self): - self._test('Any X,XT WHERE X is Card, X title XT, X cw_source S, S name "cards"', - [('OneFetchStep', - [('Any X,XT WHERE X is Card, X title XT', - [{'X': 'Card', 'XT': 'String'}])], - None, None, [self.cards], {}, []) - ]) - - def test_source_specified_3_2(self): - self._test('Any X,XT WHERE X is Card, X title XT, X cw_source S, S name "datafeed"', - [('OneFetchStep', - [('Any X,XT WHERE X is Card, X title XT, X cw_source S, S name "datafeed"', - [{'X': 'Card', 'XT': 'String', 'S': 'CWSource'}])], - None, None, [self.system], {}, []) - ]) - - def test_source_specified_3_3(self): - self.skipTest('oops') - self._test('Any STN WHERE X is Note, X type XT, X in_state ST, ST name STN, X cw_source S, S name "cards"', - [('OneFetchStep', - [('Any X,XT WHERE X is Card, X title XT', - [{'X': 'Card', 'XT': 'String'}])], - None, None, [self.cards], {}, []) - ]) - - def test_source_conflict_1(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - with self.assertRaises(BadRQLQuery) as cm: - self._test('Any X WHERE X cw_source S, S name "system", X eid %(x)s', - [], {'x': 999999}) - self.assertEqual(str(cm.exception), 'source conflict for term %(x)s') - - def test_source_conflict_2(self): - with self.assertRaises(BadRQLQuery) as cm: - self._test('Card X WHERE X cw_source S, S name "systeme"', []) - self.assertEqual(str(cm.exception), 'source conflict for term X') - - def test_source_conflict_3(self): - self.skipTest('oops') - self._test('CWSource X WHERE X cw_source S, S name "cards"', - [('OneFetchStep', - [(u'Any X WHERE X cw_source S, S name "cards", X is 
CWSource', - [{'S': 'CWSource', 'X': 'CWSource'}])], - None, None, - [self.system], - {}, [])]) - - - def test_ambigous_cross_relation_source_specified(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.cards.support_relations['see_also'] = True - self.cards.cross_relations.add('see_also') - try: - self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E see_also X, X modification_date AA', - [('AggrStep', - 'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1', - None, - [('FetchStep', - [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Note', - [{'AA': 'Datetime', 'X': 'Note'}])], [self.cards, self.system], {}, - {'AA': 'table0.C1', 'X': 'table0.C0', - 'X.modification_date': 'table0.C1'}, - []), - ('FetchStep', - [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Bookmark', - [{'AA': 'Datetime', 'X': 'Bookmark'}])], - [self.system], {}, - {'AA': 'table0.C1', 'X': 'table0.C0', - 'X.modification_date': 'table0.C1'}, - [])])], - {'x': 999999}) - finally: - del self.cards.support_relations['see_also'] - self.cards.cross_relations.remove('see_also') - - # non regression tests #################################################### - - def test_nonregr1(self): - self._test('Any X, Y WHERE X copain Y, X login "syt", Y login "cochon"', - [('FetchStep', - [('Any X WHERE X login "syt", X is CWUser', [{'X': 'CWUser'}])], - [self.ldap, self.system], None, {'X': 'table0.C0'}, []), - ('FetchStep', - [('Any Y WHERE Y login "cochon", Y is CWUser', [{'Y': 'CWUser'}])], - [self.ldap, self.system], None, {'Y': 'table1.C0'}, []), - ('OneFetchStep', - [('Any X,Y WHERE X copain Y, X is CWUser, Y is CWUser', - [{'X': 'CWUser', 'Y': 'CWUser'}])], - None, None, [self.system], {'X': 'table0.C0', 'Y': 'table1.C0'}, []) - ]) - - def test_nonregr2(self): - iworkflowable = self.session.user.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate') - treid = iworkflowable.latest_trinfo().eid - 
self._test('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', - [('FetchStep', [('Any X,D WHERE X modification_date D, X is Note', - [{'X': 'Note', 'D': 'Datetime'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'D': 'table0.C1'}, []), - ('FetchStep', [('Any X,D WHERE X modification_date D, X is CWUser', - [{'X': 'CWUser', 'D': 'Datetime'}])], - [self.ldap, self.system], None, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', 'D': 'table1.C1'}, []), - ('AggrStep', 'SELECT table2.C0 FROM table2\nORDER BY table2.C1 DESC', None, [ - ('FetchStep', [('Any X,D WHERE E eid %s, E wf_info_for X, X modification_date D, E is TrInfo, X is Affaire'%treid, - [{'X': 'Affaire', 'E': 'TrInfo', 'D': 'Datetime'}])], - [self.system], - {}, - {'X': 'table2.C0', 'X.modification_date': 'table2.C1', 'D': 'table2.C1', 'E.wf_info_for': 'table2.C0'}, []), - ('FetchStep', [('Any X,D WHERE E eid %s, E wf_info_for X, X modification_date D, E is TrInfo, X is CWUser'%treid, - [{'X': 'CWUser', 'E': 'TrInfo', 'D': 'Datetime'}])], - [self.system], - {'X': 'table1.C0', 'X.modification_date': 'table1.C1', 'D': 'table1.C1'}, - {'X': 'table2.C0', 'X.modification_date': 'table2.C1', 'D': 'table2.C1', 'E.wf_info_for': 'table2.C0'}, []), - ('FetchStep', [('Any X,D WHERE E eid %s, E wf_info_for X, X modification_date D, E is TrInfo, X is Note'%treid, - [{'X': 'Note', 'E': 'TrInfo', 'D': 'Datetime'}])], - [self.system], - {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'D': 'table0.C1'}, - {'X': 'table2.C0', 'X.modification_date': 'table2.C1', 'D': 'table2.C1', 'E.wf_info_for': 'table2.C0'}, []), - ]), - ], - {'x': treid}) - - def test_nonregr3(self): - # original jpl query: - # Any X, NOW - CD, P WHERE P is Project, U interested_in P, U is CWUser, U login "sthenault", X concerns P, X creation_date CD ORDERBY CD DESC LIMIT 5 - self._test('Any X, NOW - CD, P ORDERBY CD DESC LIMIT 5 WHERE P bookmarked_by U, U login 
"admin", P is X, X creation_date CD', - [('FetchStep', [('Any U WHERE U login "admin", U is CWUser', [{'U': 'CWUser'}])], - [self.ldap, self.system], None, {'U': 'table0.C0'}, []), - ('OneFetchStep', [('Any X,(NOW - CD),P ORDERBY CD DESC LIMIT 5 WHERE P bookmarked_by U, P is X, X creation_date CD, P is Bookmark, U is CWUser, X is CWEType', - [{'P': 'Bookmark', 'U': 'CWUser', 'X': 'CWEType', 'CD': 'Datetime'}])], - 5, None, [self.system], {'U': 'table0.C0'}, [])] - ) - - def test_nonregr4(self): - ueid = self.session.user.eid - self._test('Any U ORDERBY D DESC WHERE WF wf_info_for X, WF creation_date D, WF from_state FS, ' - 'WF owned_by U?, X eid %(x)s', - [#('FetchStep', [('Any U WHERE U is CWUser', [{'U': 'CWUser'}])], - # [self.ldap, self.system], None, {'U': 'table0.C0'}, []), - ('OneFetchStep', [('Any U ORDERBY D DESC WHERE WF wf_info_for %s, WF creation_date D, WF from_state FS, WF owned_by U?' % ueid, - [{'WF': 'TrInfo', 'FS': 'State', 'U': 'CWUser', 'D': 'Datetime'}])], - None, None, - [self.system], {}, [])], - {'x': ueid}) - - def test_nonregr5(self): - # original jpl query: - # DISTINCT Version V WHERE MB done_in MV, MV eid %(x)s, - # MB depends_on B, B done_in V, V version_of P, NOT P eid %(p)s' - cardeid = self.execute('INSERT Card X: X title "hop"')[0][0] - noteeid = self.execute('INSERT Note X')[0][0] - self._test('DISTINCT Card V WHERE MB documented_by MV, MV eid %(x)s, ' - 'MB depends_on B, B documented_by V, V multisource_rel P, NOT P eid %(p)s', - [('FetchStep', [('Any V WHERE V multisource_rel P, NOT P eid %s, P is Note, V is Card'%noteeid, - [{'P': 'Note', 'V': 'Card'}])], - [self.cards, self.system], None, {'V': 'table0.C0'}, []), - ('OneFetchStep', [('DISTINCT Any V WHERE MB documented_by %s, MB depends_on B, B documented_by V, B is Affaire, MB is Affaire, V is Card'%cardeid, - [{'B': 'Affaire', 'MB': 'Affaire', 'V': 'Card'}])], - None, None, [self.system], {'V': 'table0.C0'}, [])], - {'x': cardeid, 'p': noteeid}) - - def test_nonregr6(self): 
- self._test('Any X WHERE X concerne Y', - [('OneFetchStep', [('Any X WHERE X concerne Y', - [{'Y': 'Division', 'X': 'Affaire'}, - {'Y': 'Note', 'X': 'Affaire'}, - {'Y': 'Societe', 'X': 'Affaire'}, - {'Y': 'SubDivision', 'X': 'Affaire'}, - {'Y': 'Affaire', 'X': 'Personne'}])], - None, None, [self.system], {}, []) - ]) - self._test('Any X WHERE X concerne Y, Y is Note', - [('FetchStep', [('Any Y WHERE Y is Note', [{'Y': 'Note'}])], - [self.cards, self.system], None, {'Y': 'table0.C0'}, []), - ('OneFetchStep', [('Any X WHERE X concerne Y, X is Affaire, Y is Note', - [{'X': 'Affaire', 'Y': 'Note'}])], - None, None, [self.system], {'Y': 'table0.C0'}, []) - ]) - - def test_nonregr7(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any S,SUM(DUR),SUM(I),(SUM(I) - SUM(DUR)),MIN(DI),MAX(DI) GROUPBY S ORDERBY S WHERE A is Affaire, A duration DUR, A invoiced I, A modification_date DI, A in_state S, S name SN, (EXISTS(A concerne WP, W multisource_rel WP)) OR (EXISTS(A concerne W)), W eid %(n)s', - [('FetchStep', [('Any WP WHERE 999999 multisource_rel WP, WP is Note', [{'WP': 'Note'}])], - [self.cards], None, {'WP': u'table0.C0'}, []), - ('OneFetchStep', [('Any S,SUM(DUR),SUM(I),(SUM(I) - SUM(DUR)),MIN(DI),MAX(DI) GROUPBY S ORDERBY S WHERE A duration DUR, A invoiced I, A modification_date DI, A in_state S, S name SN, (EXISTS(A concerne WP, WP is Note)) OR (EXISTS(A concerne 999999)), A is Affaire, S is State', - [{'A': 'Affaire', 'DI': 'Datetime', 'DUR': 'Int', 'I': 'Float', 'S': 'State', 'SN': 'String', 'WP': 'Note'}])], - None, None, [self.system], {'WP': u'table0.C0'}, [])], - {'n': 999999}) - - def test_nonregr8(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X,Z WHERE X eid %(x)s, X multisource_rel Y, Z concerne X', - [('FetchStep', [('Any 999999 WHERE 999999 multisource_rel Y, Y is Note', - [{'Y': 'Note'}])], - [self.cards], - None, {u'%(x)s': 'table0.C0'}, - []), - ('OneFetchStep', 
[('Any 999999,Z WHERE Z concerne 999999, Z is Affaire', - [{'Z': 'Affaire'}])], - None, None, [self.system], - {u'%(x)s': 'table0.C0'}, []), - ], - {'x': 999999}) - - def test_nonregr9(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - repo._type_source_cache[999998] = ('Note', 'cards', 999998, 'cards') - self._test('SET X migrated_from Y WHERE X eid %(x)s, Y multisource_rel Z, Z eid %(z)s, Y migrated_from Z', - [('FetchStep', [('Any Y WHERE Y multisource_rel 999998, Y is Note', [{'Y': 'Note'}])], - [self.cards], None, {'Y': u'table0.C0'}, []), - ('UpdateStep', - [('OneFetchStep', [('DISTINCT Any Y WHERE Y migrated_from 999998, Y is Note', - [{'Y': 'Note'}])], - None, None, [self.system], - {'Y': u'table0.C0'}, [])])], - {'x': 999999, 'z': 999998}) - - def test_nonregr10(self): - repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap') - self._test('Any X,AA,AB ORDERBY AA WHERE E eid %(x)s, E owned_by X, X login AA, X modification_date AB', - [('FetchStep', - [('Any X,AA,AB WHERE X login AA, X modification_date AB, X is CWUser', - [{'AA': 'String', 'AB': 'Datetime', 'X': 'CWUser'}])], - [self.ldap, self.system], None, {'AA': 'table0.C1', 'AB': 'table0.C2', - 'X': 'table0.C0', 'X.login': 'table0.C1', 'X.modification_date': 'table0.C2'}, - []), - ('OneFetchStep', - [('Any X,AA,AB ORDERBY AA WHERE 999999 owned_by X, X login AA, X modification_date AB, X is CWUser', - [{'AA': 'String', 'AB': 'Datetime', 'X': 'CWUser'}])], - None, None, [self.system], {'AA': 'table0.C1', 'AB': 'table0.C2', - 'X': 'table0.C0', 'X.login': 'table0.C1', 'X.modification_date': 'table0.C2'}, - []) - ], - {'x': 999999}) - - def test_nonregr11(self): - repo._type_source_cache[999999] = ('Bookmark', 'system', 999999, 'system') - self._test('SET X bookmarked_by Y WHERE X eid %(x)s, Y login "hop"', - [('UpdateStep', - [('OneFetchStep', [('DISTINCT Any Y WHERE Y login "hop", Y is CWUser', [{'Y': 'CWUser'}])], - None, None, [self.ldap, self.system], {}, [])] 
- )], - {'x': 999999}) - - def test_nonregr12(self): - repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X ORDERBY Z DESC WHERE X modification_date Z, E eid %(x)s, E see_also X', - [('FetchStep', [('Any X,Z WHERE X modification_date Z, X is Note', - [{'X': 'Note', 'Z': 'Datetime'}])], - [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'Z': 'table0.C1'}, - []), - ('AggrStep', 'SELECT table1.C0 FROM table1\nORDER BY table1.C1 DESC', None, - [('FetchStep', [('Any X,Z WHERE X modification_date Z, 999999 see_also X, X is Bookmark', - [{'X': 'Bookmark', 'Z': 'Datetime'}])], - [self.system], {}, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', - 'Z': 'table1.C1'}, - []), - ('FetchStep', [('Any X,Z WHERE X modification_date Z, 999999 see_also X, X is Note', - [{'X': 'Note', 'Z': 'Datetime'}])], - [self.system], {'X': 'table0.C0', 'X.modification_date': 'table0.C1', - 'Z': 'table0.C1'}, - {'X': 'table1.C0', 'X.modification_date': 'table1.C1', - 'Z': 'table1.C1'}, - [])] - )], - {'x': 999999}) - - def test_nonregr13_1(self): - ueid = self.session.user.eid - # identity wrapped into exists: - # should'nt propagate constraint that U is in the same source as ME - self._test('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File ' - 'WITH U,UL BEING (Any U,UL WHERE ME eid %(x)s, (EXISTS(U identity ME) ' - 'OR (EXISTS(U in_group G, G name IN("managers", "staff")))) ' - 'OR (EXISTS(U in_group H, ME in_group H, NOT H name "users")), U login UL, U is CWUser)', - [('FetchStep', [('Any U,UL WHERE U login UL, U is CWUser', - [{'U': 'CWUser', 'UL': 'String'}])], - [self.ldap, self.system], None, - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - []), - ('FetchStep', [('Any U,UL WHERE ((EXISTS(U identity %s)) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, %s in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser' % 
(ueid, ueid), - [{'G': 'CWGroup', 'H': 'CWGroup', 'U': 'CWUser', 'UL': 'String'}])], - [self.system], - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - {'U': 'table1.C0', 'U.login': 'table1.C1', 'UL': 'table1.C1'}, - []), - ('OneFetchStep', [('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File', - [{'B': 'File', 'U': 'CWUser', 'UL': 'String'}])], - None, None, [self.system], - {'U': 'table1.C0', 'UL': 'table1.C1'}, - [])], - {'x': ueid}) - - def test_nonregr13_2(self): - # identity *not* wrapped into exists. - # - # XXX this test fail since in this case, in "U identity 5" U and 5 are - # from the same scope so constraints are applied (telling the U should - # come from the same source as user with eid 5). - # - # IMO this is normal, unless we introduce a special case for the - # identity relation. BUT I think it's better to leave it as is and to - # explain constraint propagation rules, and so why this should be - # wrapped in exists() if used in multi-source - self.skipTest('take a look at me if you wish') - ueid = self.session.user.eid - self._test('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File ' - 'WITH U,UL BEING (Any U,UL WHERE ME eid %(x)s, (U identity ME ' - 'OR (EXISTS(U in_group G, G name IN("managers", "staff")))) ' - 'OR (EXISTS(U in_group H, ME in_group H, NOT H name "users")), U login UL, U is CWUser)', - [('FetchStep', [('Any U,UL WHERE U login UL, U is CWUser', - [{'U': 'CWUser', 'UL': 'String'}])], - [self.ldap, self.system], None, - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - []), - ('FetchStep', [('Any U,UL WHERE ((U identity %s) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, %s in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser' % (ueid, ueid), - [{'G': 'CWGroup', 'H': 'CWGroup', 'U': 'CWUser', 'UL': 'String'}])], - [self.system], - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - {'U': 'table1.C0', 
'U.login': 'table1.C1', 'UL': 'table1.C1'}, - []), - ('OneFetchStep', [('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File', - [{'B': 'File', 'U': 'CWUser', 'UL': 'String'}])], - None, None, [self.system], - {'U': 'table1.C0', 'UL': 'table1.C1'}, - [])], - {'x': self.session.user.eid}) - - def test_nonregr14_1(self): - repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap') - self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s', - [('OneFetchStep', [('Any 999999 WHERE 999999 owned_by 999999', [{}])], - None, None, [self.system], {}, [])], - {'x': 999999, 'u': 999999}) - - def test_nonregr14_2(self): - repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap') - repo._type_source_cache[999998] = ('Note', 'system', 999998, 'system') - self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s', - [('OneFetchStep', [('Any 999998 WHERE 999998 owned_by 999999', [{}])], - None, None, [self.system], {}, [])], - {'x': 999998, 'u': 999999}) - - def test_nonregr14_3(self): - repo._type_source_cache[999999] = ('CWUser', 'system', 999999, 'system') - repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998, 'ldap') - self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s', - [('OneFetchStep', [('Any 999998 WHERE 999998 owned_by 999999', [{}])], - None, None, [self.system], {}, [])], - {'x': 999998, 'u': 999999}) - - def test_nonregr_identity_no_source_access_1(self): - repo._type_source_cache[999999] = ('CWUser', 'ldap', 999998, 'ldap') - self._test('Any S WHERE S identity U, S eid %(s)s, U eid %(u)s', - [('OneFetchStep', [('Any 999999 WHERE 999999 identity 999999', [{}])], - None, None, [self.system], {}, [])], - {'s': 999999, 'u': 999999}) - - def test_nonregr_identity_no_source_access_2(self): - repo._type_source_cache[999999] = ('EmailAddress', 'system', 999999, 'system') - repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998, 'ldap') - self._test('Any X WHERE O use_email X, ((EXISTS(O identity U)) OR 
(EXISTS(O in_group G, G name IN("managers", "staff")))) OR (EXISTS(O in_group G2, U in_group G2, NOT G2 name "users")), X eid %(x)s, U eid %(u)s', - [('OneFetchStep', [('Any 999999 WHERE O use_email 999999, ((EXISTS(O identity 999998)) OR (EXISTS(O in_group G, G name IN("managers", "staff")))) OR (EXISTS(O in_group G2, 999998 in_group G2, NOT G2 name "users"))', - [{'G': 'CWGroup', 'G2': 'CWGroup', 'O': 'CWUser'}])], - None, None, [self.system], {}, [])], - {'x': 999999, 'u': 999998}) - - def test_nonregr_similar_subquery(self): - repo._type_source_cache[999999] = ('Personne', 'system', 999999, 'system') - self._test('Any T,TD,U,T,UL WITH T,TD,U,UL BEING (' - '(Any T,TD,U,UL WHERE X eid %(x)s, T comments X, T content TD, T created_by U?, U login UL)' - ' UNION ' - '(Any T,TD,U,UL WHERE X eid %(x)s, X connait P, T comments P, T content TD, T created_by U?, U login UL))', - # XXX optimization: use a OneFetchStep with a UNION of both queries - [('FetchStep', [('Any U,UL WHERE U login UL, U is CWUser', - [{'U': 'CWUser', 'UL': 'String'}])], - [self.ldap, self.system], None, - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - []), - ('UnionFetchStep', - [('FetchStep', - [('Any T,TD,U,UL WHERE T comments 999999, T content TD, T created_by U?, U login UL, T is Comment, U is CWUser', - [{'T': 'Comment', 'TD': 'String', 'U': 'CWUser', 'UL': 'String'}])], - [self.system], - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - {'T': 'table1.C0', - 'T.content': 'table1.C1', - 'TD': 'table1.C1', - 'U': 'table1.C2', - 'U.login': 'table1.C3', - 'UL': 'table1.C3'}, - []), - ('FetchStep', - [('Any T,TD,U,UL WHERE 999999 connait P, T comments P, T content TD, T created_by U?, U login UL, P is Personne, T is Comment, U is CWUser', - [{'P': 'Personne', - 'T': 'Comment', - 'TD': 'String', - 'U': 'CWUser', - 'UL': 'String'}])], - [self.system], - {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'}, - {'T': 'table1.C0', - 'T.content': 'table1.C1', - 
'TD': 'table1.C1', - 'U': 'table1.C2', - 'U.login': 'table1.C3', - 'UL': 'table1.C3'}, - [])]), - ('OneFetchStep', - [('Any T,TD,U,T,UL', - [{'T': 'Comment', 'TD': 'String', 'U': 'CWUser', 'UL': 'String'}])], - None, None, - [self.system], - {'T': 'table1.C0', 'TD': 'table1.C1', 'U': 'table1.C2', 'UL': 'table1.C3'}, - [])], - {'x': 999999}) - - def test_nonregr_dont_readd_already_processed_relation(self): - self._test('Any WO,D,SO WHERE WO is Note, D tags WO, WO in_state SO', - [('FetchStep', - [('Any WO,SO WHERE WO in_state SO, SO is State, WO is Note', - [{'SO': 'State', 'WO': 'Note'}])], - [self.cards, self.system], None, - {'SO': 'table0.C1', 'WO': 'table0.C0'}, - []), - ('OneFetchStep', - [('Any WO,D,SO WHERE D tags WO, D is Tag, SO is State, WO is Note', - [{'D': 'Tag', 'SO': 'State', 'WO': 'Note'}])], - None, None, [self.system], - {'SO': 'table0.C1', 'WO': 'table0.C0'}, - []) - ]) - -class MSPlannerTwoSameExternalSourcesTC(BasePlannerTC): - """test planner related feature on a 3-sources repository: - - * 2 rql sources supporting Card - """ - - def setUp(self): - self.__class__.repo = repo - self.setup() - self.add_source(FakeCardSource, 'cards') - self.add_source(FakeCardSource, 'cards2') - self.planner = MSPlanner(self.o.schema, self.repo.vreg.rqlhelper) - assert repo.sources_by_uri['cards2'].support_relation('multisource_crossed_rel') - assert 'multisource_crossed_rel' in repo.sources_by_uri['cards2'].cross_relations - assert repo.sources_by_uri['cards'].support_relation('multisource_crossed_rel') - assert 'multisource_crossed_rel' in repo.sources_by_uri['cards'].cross_relations - _test = test_plan - - - def test_linked_external_entities(self): - repo._type_source_cache[999999] = ('Tag', 'system', 999999, 'system') - self._test('Any X,XT WHERE X is Card, X title XT, T tags X, T eid %(t)s', - [('FetchStep', - [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.cards, self.cards2, self.system], - None, {'X': 'table0.C0', 
'X.title': 'table0.C1', 'XT': 'table0.C1'}, - []), - ('OneFetchStep', - [('Any X,XT WHERE X title XT, 999999 tags X, X is Card', - [{'X': 'Card', 'XT': 'String'}])], - None, None, [self.system], - {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, - [])], - {'t': 999999}) - - def test_version_depends_on(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X,AD,AE WHERE E eid %(x)s, E migrated_from X, X in_state AD, AD name AE', - [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.cards, self.cards2, self.system], - None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', - 'AE': 'table0.C2', 'X': 'table0.C0'}, - []), - ('OneFetchStep', [('Any X,AD,AE WHERE 999999 migrated_from X, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - None, None, [self.system], - {'AD': 'table0.C1', 'AD.name': 'table0.C2', 'AE': 'table0.C2', 'X': 'table0.C0'}, - [])], - {'x': 999999}) - - def test_version_crossed_depends_on_1(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', - [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.cards, self.cards2, self.system], - None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', - 'AE': 'table0.C2', 'X': 'table0.C0'}, - []), - ('UnionStep', None, None, - [('OneFetchStep', [('Any X,AD,AE WHERE 999999 multisource_crossed_rel X, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - None, None, [self.cards], None, - []), - ('OneFetchStep', [('Any X,AD,AE WHERE 999999 multisource_crossed_rel X, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - None, None, 
[self.system], - {'AD': 'table0.C1', 'AD.name': 'table0.C2', - 'AE': 'table0.C2', 'X': 'table0.C0'}, - [])] - )], - {'x': 999999}) - - def test_version_crossed_depends_on_2(self): - self.repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system') - self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', - [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.cards, self.cards2, self.system], - None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', - 'AE': 'table0.C2', 'X': 'table0.C0'}, - []), - ('OneFetchStep', [('Any X,AD,AE WHERE 999999 multisource_crossed_rel X, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - None, None, [self.system], - {'AD': 'table0.C1', 'AD.name': 'table0.C2', 'AE': 'table0.C2', 'X': 'table0.C0'}, - [])], - {'x': 999999}) - - def test_version_crossed_depends_on_3(self): - self._test('Any X,AD,AE WHERE E multisource_crossed_rel X, X in_state AD, AD name AE, E is Note', - [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', - [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.cards, self.cards2, self.system], - None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', - 'AE': 'table0.C2', 'X': 'table0.C0'}, - []), - ('FetchStep', [('Any E WHERE E is Note', [{'E': 'Note'}])], - [self.cards, self.cards2, self.system], - None, {'E': 'table1.C0'}, - []), - ('UnionStep', None, None, - [('OneFetchStep', [('Any X,AD,AE WHERE E multisource_crossed_rel X, AD name AE, AD is State, E is Note, X is Note', - [{'AD': 'State', 'AE': 'String', 'E': 'Note', 'X': 'Note'}])], - None, None, [self.cards, self.cards2], None, - []), - ('OneFetchStep', [('Any X,AD,AE WHERE E multisource_crossed_rel X, AD name AE, AD is State, E is Note, X is Note', - [{'AD': 'State', 'AE': 'String', 'E': 'Note', 'X': 'Note'}])], - None, None, [self.system], - {'AD': 
'table0.C1', - 'AD.name': 'table0.C2', - 'AE': 'table0.C2', - 'E': 'table1.C0', - 'X': 'table0.C0'}, - [])] - )] - ) - - def test_version_crossed_depends_on_4(self): - self._test('Any X,AD,AE WHERE EXISTS(E multisource_crossed_rel X), X in_state AD, AD name AE, E is Note', - [('FetchStep', - [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', - [{'X': 'Note', 'AD': 'State', 'AE': 'String'}])], - [self.cards, self.cards2, self.system], None, - {'X': 'table0.C0', - 'AD': 'table0.C1', - 'AD.name': 'table0.C2', - 'AE': 'table0.C2'}, - []), - ('FetchStep', - [('Any A WHERE E multisource_crossed_rel A, A is Note, E is Note', - [{'A': 'Note', 'E': 'Note'}])], - [self.cards, self.cards2, self.system], None, - {'A': 'table1.C0'}, - []), - ('OneFetchStep', - [('Any X,AD,AE WHERE EXISTS(X identity A), AD name AE, A is Note, AD is State, X is Note', - [{'A': 'Note', 'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - None, None, - [self.system], - {'A': 'table1.C0', - 'AD': 'table0.C1', - 'AD.name': 'table0.C2', - 'AE': 'table0.C2', - 'X': 'table0.C0'}, - [] - )] - ) - - def test_nonregr_dont_cross_rel_source_filtering_1(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any S WHERE E eid %(x)s, E in_state S, NOT S name "moved"', - [('OneFetchStep', [('Any S WHERE 999999 in_state S, NOT S name "moved", S is State', - [{'S': 'State'}])], - None, None, [self.cards], {}, [] - )], - {'x': 999999}) - - def test_nonregr_dont_cross_rel_source_filtering_2(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB', - [('OneFetchStep', [('Any X,AA,AB WHERE 999999 in_state X, X name AA, X modification_date AB, X is State', - [{'AA': 'String', 'AB': 'Datetime', 'X': 'State'}])], - None, None, [self.cards], {}, [] - )], - {'x': 999999}) - - def test_nonregr_eid_query(self): - 
self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Any X WHERE X eid 999999', - [('OneFetchStep', [('Any 999999', [{}])], - None, None, [self.system], {}, [] - )], - {'x': 999999}) - - - def test_nonregr_not_is(self): - self._test("Any X WHERE X owned_by U, U login 'anon', NOT X is Comment", - [('FetchStep', [('Any X WHERE X is IN(Card, Note, State)', - [{'X': 'Note'}, {'X': 'State'}, {'X': 'Card'}])], - [self.cards, self.cards2, self.system], - None, {'X': 'table0.C0'}, []), - ('UnionStep', None, None, - [('OneFetchStep', - [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Affaire, BaseTransition, Basket, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWDataImport, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWSourceHostConfig, CWSourceSchemaConfig, CWUniqueTogetherConstraint, CWUser, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Old, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', - [{'U': 'CWUser', 'X': 'Affaire'}, - {'U': 'CWUser', 'X': 'BaseTransition'}, - {'U': 'CWUser', 'X': 'Basket'}, - {'U': 'CWUser', 'X': 'Bookmark'}, - {'U': 'CWUser', 'X': 'CWAttribute'}, - {'U': 'CWUser', 'X': 'CWCache'}, - {'U': 'CWUser', 'X': 'CWConstraint'}, - {'U': 'CWUser', 'X': 'CWConstraintType'}, - {'U': 'CWUser', 'X': 'CWDataImport'}, - {'U': 'CWUser', 'X': 'CWEType'}, - {'U': 'CWUser', 'X': 'CWGroup'}, - {'U': 'CWUser', 'X': 'CWPermission'}, - {'U': 'CWUser', 'X': 'CWProperty'}, - {'U': 'CWUser', 'X': 'CWRType'}, - {'U': 'CWUser', 'X': 'CWRelation'}, - {'U': 'CWUser', 'X': 'CWSource'}, - {'U': 'CWUser', 'X': 'CWSourceHostConfig'}, - {'U': 'CWUser', 'X': 'CWSourceSchemaConfig'}, - {'U': 'CWUser', 'X': 'CWUniqueTogetherConstraint'}, - {'U': 'CWUser', 'X': 'CWUser'}, - {'U': 'CWUser', 'X': 'Division'}, - {'U': 'CWUser', 'X': 'Email'}, - {'U': 'CWUser', 'X': 
'EmailAddress'}, - {'U': 'CWUser', 'X': 'EmailPart'}, - {'U': 'CWUser', 'X': 'EmailThread'}, - {'U': 'CWUser', 'X': 'ExternalUri'}, - {'U': 'CWUser', 'X': 'File'}, - {'U': 'CWUser', 'X': 'Folder'}, - {'U': 'CWUser', 'X': 'Old'}, - {'U': 'CWUser', 'X': 'Personne'}, - {'U': 'CWUser', 'X': 'RQLExpression'}, - {'U': 'CWUser', 'X': 'Societe'}, - {'U': 'CWUser', 'X': 'SubDivision'}, - {'U': 'CWUser', 'X': 'SubWorkflowExitPoint'}, - {'U': 'CWUser', 'X': 'Tag'}, - {'U': 'CWUser', 'X': 'TrInfo'}, - {'U': 'CWUser', 'X': 'Transition'}, - {'U': 'CWUser', 'X': 'Workflow'}, - {'U': 'CWUser', 'X': 'WorkflowTransition'}])], - None, None, - [self.system], {}, []), - ('OneFetchStep', - [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Card, Note, State)', - [{'U': 'CWUser', 'X': 'Note'}, - {'U': 'CWUser', 'X': 'State'}, - {'U': 'CWUser', 'X': 'Card'}])], - None, None, - [self.system], {'X': 'table0.C0'}, []) - ]) - ]) - - def test_remove_from_deleted_source_1(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self._test('Note X WHERE X eid 999999, NOT X cw_source Y', - [('OneFetchStep', - [('Any 999999 WHERE NOT EXISTS(999999 cw_source Y)', - [{'Y': 'CWSource'}])], - None, None, [self.system], {}, []) - ]) - - def test_remove_from_deleted_source_2(self): - self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards') - self.repo._type_source_cache[999998] = ('Note', 'cards', 999998, 'cards') - self._test('Note X WHERE X eid IN (999998, 999999), NOT X cw_source Y', - [('FetchStep', - [('Any X WHERE X eid IN(999998, 999999), X is Note', - [{'X': 'Note'}])], - [self.cards], None, {'X': 'table0.C0'}, []), - ('OneFetchStep', - [('Any X WHERE NOT EXISTS(X cw_source Y, Y is CWSource), X is Note', - [{'X': 'Note', 'Y': 'CWSource'}])], - None, None, [self.system],{'X': 'table0.C0'}, []) - ]) - - -class FakeVCSSource(AbstractSource): - uri = 'ccc' - support_entities = {'Card': True, 'Note': True} - support_relations = 
{'multisource_inlined_rel': True, - 'multisource_rel': True} - - def syntax_tree_search(self, *args, **kwargs): - return [] - -class MSPlannerVCSSource(BasePlannerTC): - - def setUp(self): - self.__class__.repo = repo - self.setup() - self.add_source(FakeVCSSource, 'vcs') - self.planner = MSPlanner(self.o.schema, self.repo.vreg.rqlhelper) - _test = test_plan - - def test_multisource_inlined_rel_skipped(self): - self._test('Any MAX(VC) ' - 'WHERE VC multisource_inlined_rel R2, R para %(branch)s, VC in_state S, S name "published", ' - '(EXISTS(R identity R2)) OR (EXISTS(R multisource_rel R2))', - [('FetchStep', [('Any VC WHERE VC multisource_inlined_rel R2, R para "???", (EXISTS(R identity R2)) OR (EXISTS(R multisource_rel R2)), R is Note, R2 is Note, VC is Note', - [{'R': 'Note', 'R2': 'Note', 'VC': 'Note'}])], - [self.vcs, self.system], None, - {'VC': 'table0.C0'}, - []), - ('OneFetchStep', [(u'Any MAX(VC) WHERE VC in_state S, S name "published", S is State, VC is Note', - [{'S': 'State', 'VC': 'Note'}])], - None, None, [self.system], - {'VC': 'table0.C0'}, - []) - ]) - - def test_fully_simplified_extsource(self): - self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998, 'vcs') - self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999, 'vcs') - self._test('Any X, Y WHERE NOT X multisource_rel Y, X eid 999998, Y eid 999999', - [('OneFetchStep', [('Any 999998,999999 WHERE NOT EXISTS(999998 multisource_rel 999999)', [{}])], - None, None, [self.vcs], {}, []) - ]) - - def test_nonregr_fully_simplified_extsource(self): - self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998, 'vcs') - self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999, 'vcs') - self.repo._type_source_cache[1000000] = ('Note', 'system', 1000000, 'system') - self._test('DISTINCT Any T,FALSE,L,M WHERE L eid 1000000, M eid 999999, T eid 999998', - [('OneFetchStep', [('DISTINCT Any 999998,FALSE,1000000,999999', [{}])], - None, None, [self.system], {}, []) - ]) - - -if __name__ 
== '__main__': - from logilab.common.testlib import unittest_main - unittest_main()
--- a/server/test/unittest_multisources.py Fri Jan 10 18:31:07 2014 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,394 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. - -from datetime import datetime, timedelta -from itertools import repeat - -from cubicweb.devtools import TestServerConfiguration, init_test_database -from cubicweb.devtools.testlib import CubicWebTC, Tags -from cubicweb.devtools.repotest import do_monkey_patch, undo_monkey_patch -from cubicweb.devtools import get_test_db_handler - -class ExternalSource1Configuration(TestServerConfiguration): - sourcefile = 'sources_extern' - -class ExternalSource2Configuration(TestServerConfiguration): - sourcefile = 'sources_multi' - -MTIME = datetime.utcnow() - timedelta(0, 10) - -EXTERN_SOURCE_CFG = u''' -cubicweb-user = admin -cubicweb-password = gingkow -base-url=http://extern.org/ -''' - -# hi-jacking -from cubicweb.server.sources.pyrorql import PyroRQLSource -from cubicweb.dbapi import Connection - -PyroRQLSource_get_connection = PyroRQLSource.get_connection -Connection_close = Connection.close - -def add_extern_mapping(source): - source.init_mapping(zip(('Card', 'Affaire', 'State', - 'in_state', 'documented_by', 
'multisource_inlined_rel'), - repeat(u'write'))) - - -def pre_setup_database_extern(session, config): - session.execute('INSERT Card X: X title "C3: An external card", X wikiid "aaa"') - session.execute('INSERT Card X: X title "C4: Ze external card", X wikiid "zzz"') - session.execute('INSERT Affaire X: X ref "AFFREF"') - session.commit() - -def pre_setup_database_multi(session, config): - session.create_entity('CWSource', name=u'extern', type=u'pyrorql', - url=u'pyro:///extern', config=EXTERN_SOURCE_CFG) - session.commit() - - -class TwoSourcesTC(CubicWebTC): - """Main repo -> extern-multi -> extern - \-------------/ - """ - test_db_id= 'cw-server-multisources' - tags = CubicWebTC.tags | Tags(('multisources')) - - @classmethod - def setUpClass(cls): - cls._cfg2 = ExternalSource1Configuration('data', apphome=TwoSourcesTC.datadir) - cls._cfg3 = ExternalSource2Configuration('data', apphome=TwoSourcesTC.datadir) - TestServerConfiguration.no_sqlite_wrap = True - # hi-jack PyroRQLSource.get_connection to access existing connection (no - # pyro connection) - PyroRQLSource.get_connection = lambda x: x.uri == 'extern-multi' and cls.cnx3 or cls.cnx2 - # also necessary since the repository is closing its initial connections - # pool though we want to keep cnx2 valid - Connection.close = lambda x: None - - @classmethod - def tearDowncls(cls): - PyroRQLSource.get_connection = PyroRQLSource_get_connection - Connection.close = Connection_close - cls.cnx2.close() - cls.cnx3.close() - TestServerConfiguration.no_sqlite_wrap = False - - @classmethod - def _init_repo(cls): - repo2_handler = get_test_db_handler(cls._cfg2) - repo2_handler.build_db_cache('4cards-1affaire',pre_setup_func=pre_setup_database_extern) - cls.repo2, cls.cnx2 = repo2_handler.get_repo_and_cnx('4cards-1affaire') - - repo3_handler = get_test_db_handler(cls._cfg3) - repo3_handler.build_db_cache('multisource',pre_setup_func=pre_setup_database_multi) - cls.repo3, cls.cnx3 = 
repo3_handler.get_repo_and_cnx('multisource') - - - super(TwoSourcesTC, cls)._init_repo() - - def setUp(self): - CubicWebTC.setUp(self) - self.addCleanup(self.cnx2.close) - self.addCleanup(self.cnx3.close) - do_monkey_patch() - - def tearDown(self): - for source in self.repo.sources[1:]: - self.repo.remove_source(source.uri) - CubicWebTC.tearDown(self) - self.cnx2.close() - self.cnx3.close() - undo_monkey_patch() - - @staticmethod - def pre_setup_database(session, config): - for uri, src_config in [('extern', EXTERN_SOURCE_CFG), - ('extern-multi', ''' -cubicweb-user = admin -cubicweb-password = gingkow -''')]: - source = session.create_entity('CWSource', name=unicode(uri), - type=u'pyrorql', url=u'pyro:///extern-multi', - config=unicode(src_config)) - session.commit() - add_extern_mapping(source) - - session.commit() - # trigger discovery - session.execute('Card X') - session.execute('Affaire X') - session.execute('State X') - - def setup_database(self): - cu2 = self.cnx2.cursor() - self.ec1 = cu2.execute('Any X WHERE X is Card, X title "C3: An external card", X wikiid "aaa"')[0][0] - self.aff1 = cu2.execute('Any X WHERE X is Affaire, X ref "AFFREF"')[0][0] - cu2.close() - # add some entities - self.ic1 = self.sexecute('INSERT Card X: X title "C1: An internal card", X wikiid "aaai"')[0][0] - self.ic2 = self.sexecute('INSERT Card X: X title "C2: Ze internal card", X wikiid "zzzi"')[0][0] - - def test_eid_comp(self): - rset = self.sexecute('Card X WHERE X eid > 1') - self.assertEqual(len(rset), 4) - rset = self.sexecute('Any X,T WHERE X title T, X eid > 1') - self.assertEqual(len(rset), 4) - - def test_metainformation(self): - rset = self.sexecute('Card X ORDERBY T WHERE X title T') - # 2 added to the system source, 2 added to the external source - self.assertEqual(len(rset), 4) - # since they are orderd by eid, we know the 3 first one is coming from the system source - # and the others from external source - self.assertEqual(rset.get_entity(0, 
0).cw_metainformation(), - {'source': {'type': 'native', 'uri': 'system', 'use-cwuri-as-url': False}, - 'type': u'Card', 'extid': None}) - externent = rset.get_entity(3, 0) - metainf = externent.cw_metainformation() - self.assertEqual(metainf['source'], {'type': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern', 'use-cwuri-as-url': False}) - self.assertEqual(metainf['type'], 'Card') - self.assert_(metainf['extid']) - etype = self.sexecute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s', - {'x': externent.eid})[0][0] - self.assertEqual(etype, 'Card') - - def test_order_limit_offset(self): - rsetbase = self.sexecute('Any W,X ORDERBY W,X WHERE X wikiid W') - self.assertEqual(len(rsetbase), 4) - self.assertEqual(sorted(rsetbase.rows), rsetbase.rows) - rset = self.sexecute('Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WHERE X wikiid W') - self.assertEqual(rset.rows, rsetbase.rows[2:4]) - - def test_has_text(self): - self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before - self.assertTrue(self.sexecute('Any X WHERE X has_text "affref"')) - self.assertTrue(self.sexecute('Affaire X WHERE X has_text "affref"')) - self.assertTrue(self.sexecute('Any X ORDERBY FTIRANK(X) WHERE X has_text "affref"')) - self.assertTrue(self.sexecute('Affaire X ORDERBY FTIRANK(X) WHERE X has_text "affref"')) - - def test_anon_has_text(self): - self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before - self.sexecute('INSERT Affaire X: X ref "no readable card"')[0][0] - aff1 = self.sexecute('INSERT Affaire X: X ref "card"')[0][0] - # grant read access - self.sexecute('SET X owned_by U WHERE X eid %(x)s, U login "anon"', {'x': aff1}) - self.commit() - cnx = self.login('anon') - cu = cnx.cursor() - rset = cu.execute('Any X WHERE X has_text "card"') - # 5: 4 card + 1 readable affaire - self.assertEqual(len(rset), 5, zip(rset.rows, rset.description)) - rset = cu.execute('Any X ORDERBY FTIRANK(X) WHERE X has_text "card"') 
- self.assertEqual(len(rset), 5, zip(rset.rows, rset.description)) - Connection_close(cnx.cnx) # cnx is a TestCaseConnectionProxy - - def test_synchronization(self): - cu = self.cnx2.cursor() - assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1}) - cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1}) - aff2 = cu.execute('INSERT Affaire X: X ref "AFFREUX"')[0][0] - self.cnx2.commit() - try: - # force sync - self.repo.sources_by_uri['extern'].synchronize(MTIME) - self.assertTrue(self.sexecute('Any X WHERE X has_text "blah"')) - self.assertTrue(self.sexecute('Any X WHERE X has_text "affreux"')) - cu.execute('DELETE Affaire X WHERE X eid %(x)s', {'x': aff2}) - self.cnx2.commit() - self.repo.sources_by_uri['extern'].synchronize(MTIME) - rset = self.sexecute('Any X WHERE X has_text "affreux"') - self.assertFalse(rset) - finally: - # restore state - cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1}) - self.cnx2.commit() - - def test_simplifiable_var(self): - affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] - rset = self.sexecute('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB', - {'x': affeid}) - self.assertEqual(len(rset), 1) - self.assertEqual(rset[0][1], "pitetre") - - def test_simplifiable_var_2(self): - affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] - rset = self.sexecute('Any E WHERE E eid %(x)s, E in_state S, NOT S name "moved"', - {'x': affeid, 'u': self.session.user.eid}) - self.assertEqual(len(rset), 1) - - def test_sort_func(self): - self.sexecute('Affaire X ORDERBY DUMB_SORT(RF) WHERE X ref RF') - - def test_sort_func_ambigous(self): - self.sexecute('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF') - - def test_in_eid(self): - iec1 = self.repo.extid2eid(self.repo.sources_by_uri['extern'], str(self.ec1), - 'Card', self.session) - rset = self.sexecute('Any X WHERE X eid IN (%s, %s)' % (iec1, self.ic1)) - self.assertEqual(sorted(r[0] for r in rset.rows), 
sorted([iec1, self.ic1])) - - def test_greater_eid(self): - rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1)) - self.assertEqual(len(rset.rows), 2) # self.ic1 and self.ic2 - cu = self.cnx2.cursor() - ec2 = cu.execute('INSERT Card X: X title "glup"')[0][0] - self.cnx2.commit() - # 'X eid > something' should not trigger discovery - rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1)) - self.assertEqual(len(rset.rows), 2) - # trigger discovery using another query - crset = self.sexecute('Card X WHERE X title "glup"') - self.assertEqual(len(crset.rows), 1) - rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1)) - self.assertEqual(len(rset.rows), 3) - rset = self.sexecute('Any MAX(X)') - self.assertEqual(len(rset.rows), 1) - self.assertEqual(rset.rows[0][0], crset[0][0]) - - def test_attr_unification_1(self): - n1 = self.sexecute('INSERT Note X: X type "AFFREF"')[0][0] - n2 = self.sexecute('INSERT Note X: X type "AFFREU"')[0][0] - rset = self.sexecute('Any X,Y WHERE X is Note, Y is Affaire, X type T, Y ref T') - self.assertEqual(len(rset), 1, rset.rows) - - def test_attr_unification_2(self): - cu = self.cnx2.cursor() - ec2 = cu.execute('INSERT Card X: X title "AFFREF"')[0][0] - self.cnx2.commit() - try: - c1 = self.sexecute('INSERT Card C: C title "AFFREF"')[0][0] - rset = self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X title T, Y ref T') - self.assertEqual(len(rset), 2, rset.rows) - finally: - cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2}) - self.cnx2.commit() - - def test_attr_unification_neq_1(self): - # XXX complete - self.sexecute('Any X,Y WHERE X is Note, Y is Affaire, X creation_date D, Y creation_date > D') - - def test_attr_unification_neq_2(self): - # XXX complete - self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X creation_date D, Y creation_date > D') - - def test_union(self): - afeids = self.sexecute('Affaire X') - ueids = self.sexecute('CWUser X') - rset = self.sexecute('(Any X WHERE X is Affaire) 
UNION (Any X WHERE X is CWUser)') - self.assertEqual(sorted(r[0] for r in rset.rows), - sorted(r[0] for r in afeids + ueids)) - - def test_subquery1(self): - rsetbase = self.sexecute('Any W,X WITH W,X BEING (Any W,X ORDERBY W,X WHERE X wikiid W)') - self.assertEqual(len(rsetbase), 4) - self.assertEqual(sorted(rsetbase.rows), rsetbase.rows) - rset = self.sexecute('Any W,X LIMIT 2 OFFSET 2 WITH W,X BEING (Any W,X ORDERBY W,X WHERE X wikiid W)') - self.assertEqual(rset.rows, rsetbase.rows[2:4]) - rset = self.sexecute('Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WITH W,X BEING (Any W,X WHERE X wikiid W)') - self.assertEqual(rset.rows, rsetbase.rows[2:4]) - rset = self.sexecute('Any W,X WITH W,X BEING (Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WHERE X wikiid W)') - self.assertEqual(rset.rows, rsetbase.rows[2:4]) - - def test_subquery2(self): - affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] - rset = self.sexecute('Any X,AA,AB WITH X,AA,AB BEING (Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB)', - {'x': affeid}) - self.assertEqual(len(rset), 1) - self.assertEqual(rset[0][1], "pitetre") - - def test_not_relation(self): - states = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN')) - userstate = self.session.user.in_state[0] - states.remove((userstate.eid, userstate.name)) - notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', - {'x': self.session.user.eid})) - self.assertSetEqual(notstates, states) - aff1 = self.sexecute('Any X WHERE X is Affaire, X ref "AFFREF"')[0][0] - aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1})[0] - self.assertEqual(aff1statename, 'pitetre') - states.add((userstate.eid, userstate.name)) - states.remove((aff1stateeid, aff1statename)) - notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', - {'x': 
aff1})) - self.assertSetEqual(notstates, states) - - def test_absolute_url_base_url(self): - cu = self.cnx2.cursor() - ceid = cu.execute('INSERT Card X: X title "without wikiid to get eid based url"')[0][0] - self.cnx2.commit() - lc = self.sexecute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0) - self.assertEqual(lc.absolute_url(), 'http://extern.org/%s' % ceid) - cu.execute('DELETE Card X WHERE X eid %(x)s', {'x':ceid}) - self.cnx2.commit() - - def test_absolute_url_no_base_url(self): - cu = self.cnx3.cursor() - ceid = cu.execute('INSERT Card X: X title "without wikiid to get eid based url"')[0][0] - self.cnx3.commit() - lc = self.sexecute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0) - self.assertEqual(lc.absolute_url(), 'http://testing.fr/cubicweb/%s' % lc.eid) - cu.execute('DELETE Card X WHERE X eid %(x)s', {'x':ceid}) - self.cnx3.commit() - - def test_crossed_relation_noeid_needattr(self): - """http://www.cubicweb.org/ticket/1382452""" - aff1 = self.sexecute('INSERT Affaire X: X ref "AFFREF"')[0][0] - # link within extern source - ec1 = self.sexecute('Card X WHERE X wikiid "zzz"')[0][0] - self.sexecute('SET A documented_by C WHERE E eid %(a)s, C eid %(c)s', - {'a': aff1, 'c': ec1}) - # link from system to extern source - self.sexecute('SET A documented_by C WHERE E eid %(a)s, C eid %(c)s', - {'a': aff1, 'c': self.ic2}) - rset = self.sexecute('DISTINCT Any DEP WHERE P ref "AFFREF", P documented_by DEP, DEP wikiid LIKE "z%"') - self.assertEqual(sorted(rset.rows), [[ec1], [self.ic2]]) - - def test_nonregr1(self): - ueid = self.session.user.eid - affaire = self.sexecute('Affaire X WHERE X ref "AFFREF"').get_entity(0, 0) - self.sexecute('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s', - {'x': affaire.eid, 'u': ueid}) - - def test_nonregr2(self): - iworkflowable = 
self.session.user.cw_adapt_to('IWorkflowable') - iworkflowable.fire_transition('deactivate') - treid = iworkflowable.latest_trinfo().eid - rset = self.sexecute('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', - {'x': treid}) - self.assertEqual(len(rset), 1) - self.assertEqual(rset.rows[0], [self.session.user.eid]) - - def test_nonregr3(self): - self.sexecute('DELETE Card X WHERE X eid %(x)s, NOT X multisource_inlined_rel Y', {'x': self.ic1}) - - def test_nonregr4(self): - self.sexecute('Any X,S,U WHERE X in_state S, X todo_by U') - - def test_delete_source(self): - req = self.request() - req.execute('DELETE CWSource S WHERE S name "extern"') - self.commit() - cu = self.session.system_sql("SELECT * FROM entities WHERE source='extern'") - self.assertFalse(cu.fetchall()) - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main()
--- a/server/test/unittest_session.py Fri Jan 10 18:31:07 2014 +0100 +++ b/server/test/unittest_session.py Mon Jan 13 13:47:47 2014 +0100 @@ -17,7 +17,7 @@ # with CubicWeb. If not, see <http://www.gnu.org/licenses/>. from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.server.session import HOOKS_ALLOW_ALL, HOOKS_DENY_ALL +from cubicweb.server.session import HOOKS_ALLOW_ALL, HOOKS_DENY_ALL, Connection class InternalSessionTC(CubicWebTC): def test_dbapi_query(self): @@ -39,10 +39,16 @@ def test_hooks_control(self): session = self.session + # this test check the "old" behavior of session with automatic connection management + # close the default cnx, we do nto want it to interfer with the test + self.cnx.close() + # open a dedicated one + session.set_cnx('Some-random-cnx-unrelated-to-the-default-one') + # go test go self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode) self.assertEqual(set(), session.disabled_hook_categories) self.assertEqual(set(), session.enabled_hook_categories) - self.assertEqual(1, len(session._txs)) + self.assertEqual(1, len(session._cnxs)) with session.deny_all_hooks_but('metadata'): self.assertEqual(HOOKS_DENY_ALL, session.hooks_mode) self.assertEqual(set(), session.disabled_hook_categories) @@ -64,12 +70,35 @@ self.assertEqual(set(('metadata',)), session.enabled_hook_categories) # leaving context manager with no transaction running should reset the # transaction local storage (and associated cnxset) - self.assertEqual({}, session._txs) + self.assertEqual({}, session._cnxs) self.assertEqual(None, session.cnxset) self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode, session.HOOKS_ALLOW_ALL) self.assertEqual(set(), session.disabled_hook_categories) self.assertEqual(set(), session.enabled_hook_categories) + def test_explicite_connection(self): + with self.session.new_cnx() as cnx: + rset = cnx.execute('Any X LIMIT 1 WHERE X is CWUser') + self.assertEqual(1, len(rset)) + user = rset.get_entity(0, 0) + user.cw_delete() + cnx.rollback() 
+ new_user = cnx.entity_from_eid(user.eid) + self.assertIsNotNone(new_user.login) + self.assertFalse(cnx._open) + + def test_internal_cnx(self): + with self.repo.internal_cnx() as cnx: + rset = cnx.execute('Any X LIMIT 1 WHERE X is CWUser') + self.assertEqual(1, len(rset)) + user = rset.get_entity(0, 0) + user.cw_delete() + cnx.rollback() + new_user = cnx.entity_from_eid(user.eid) + self.assertIsNotNone(new_user.login) + self.assertFalse(cnx._open) + + if __name__ == '__main__': from logilab.common.testlib import unittest_main
--- a/server/test/unittest_undo.py Fri Jan 10 18:31:07 2014 +0100 +++ b/server/test/unittest_undo.py Mon Jan 13 13:47:47 2014 +0100 @@ -20,7 +20,7 @@ from cubicweb import ValidationError from cubicweb.devtools.testlib import CubicWebTC import cubicweb.server.session -from cubicweb.server.session import Transaction as OldTransaction +from cubicweb.server.session import Connection as OldConnection from cubicweb.transaction import * from cubicweb.server.sources.native import UndoTransactionException, _UndoException @@ -35,14 +35,14 @@ self.txuuid = self.commit() def setUp(self): - class Transaction(OldTransaction): + class Connection(OldConnection): """Force undo feature to be turned on in all case""" undo_actions = property(lambda tx: True, lambda x, y:None) - cubicweb.server.session.Transaction = Transaction + cubicweb.server.session.Connection = Connection super(UndoableTransactionTC, self).setUp() def tearDown(self): - cubicweb.server.session.Transaction = OldTransaction + cubicweb.server.session.Connection = OldConnection self.restore_connection() self.session.undo_support = set() super(UndoableTransactionTC, self).tearDown()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/sobjects/services.py Mon Jan 13 13:47:47 2014 +0100 @@ -0,0 +1,102 @@ +# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""Define server side service provided by cubicweb""" + +import threading + +from cubicweb.server import Service +from cubicweb.predicates import match_user_groups + +class StatsService(Service): + """Return a dictionary containing some statistics about the repository + resources usage. + """ + + __regid__ = 'repo_stats' + __select__ = match_user_groups('managers') + + def call(self): + repo = self._cw.repo # Service are repo side only. 
+ results = {} + querier = repo.querier + source = repo.system_source + for size, maxsize, hits, misses, title in ( + (len(querier._rql_cache), repo.config['rql-cache-size'], + querier.cache_hit, querier.cache_miss, 'rqlt_st'), + (len(source._cache), repo.config['rql-cache-size'], + source.cache_hit, source.cache_miss, 'sql'), + ): + results['%s_cache_size' % title] = '%s / %s' % (size, maxsize) + results['%s_cache_hit' % title] = hits + results['%s_cache_miss' % title] = misses + results['%s_cache_hit_percent' % title] = (hits * 100) / (hits + misses) + results['type_source_cache_size'] = len(repo._type_source_cache) + results['extid_cache_size'] = len(repo._extid_cache) + results['sql_no_cache'] = repo.system_source.no_cache + results['nb_open_sessions'] = len(repo._sessions) + results['nb_active_threads'] = threading.activeCount() + looping_tasks = repo._tasks_manager._looping_tasks + results['looping_tasks'] = ', '.join(str(t) for t in looping_tasks) + results['available_cnxsets'] = repo._cnxsets_pool.qsize() + results['threads'] = ', '.join(sorted(str(t) for t in threading.enumerate())) + return results + +class GcStatsService(Service): + """Return a dictionary containing some statistics about the repository + resources usage. + """ + + __regid__ = 'repo_gc_stats' + __select__ = match_user_groups('managers') + + def call(self, nmax=20): + """Return a dictionary containing some statistics about the repository + memory usage. + + This is a public method, not requiring a session id. 
+ + nmax is the max number of (most) referenced object returned as + the 'referenced' result + """ + + from cubicweb._gcdebug import gc_info + from cubicweb.appobject import AppObject + from cubicweb.rset import ResultSet + from cubicweb.dbapi import Connection, Cursor + from cubicweb.web.request import CubicWebRequestBase + from rql.stmts import Union + + lookupclasses = (AppObject, + Union, ResultSet, + Connection, Cursor, + CubicWebRequestBase) + try: + from cubicweb.server.session import Session, InternalSession + lookupclasses += (InternalSession, Session) + except ImportError: + pass # no server part installed + + results = {} + counters, ocounters, garbage = gc_info(lookupclasses, + viewreferrersclasses=()) + values = sorted(counters.iteritems(), key=lambda x: x[1], reverse=True) + results['lookupclasses'] = values + values = sorted(ocounters.iteritems(), key=lambda x: x[1], reverse=True)[:nmax] + results['referenced'] = values + results['unreachable'] = len(garbage) + return results
--- a/test/unittest_dbapi.py Fri Jan 10 18:31:07 2014 +0100 +++ b/test/unittest_dbapi.py Mon Jan 13 13:47:47 2014 +0100 @@ -22,41 +22,40 @@ from logilab.common import tempattr from cubicweb import ConnectionError, cwconfig, NoSelectableObject -from cubicweb.dbapi import ProgrammingError +from cubicweb.dbapi import ProgrammingError, _repo_connect from cubicweb.devtools.testlib import CubicWebTC class DBAPITC(CubicWebTC): def test_public_repo_api(self): - cnx = self.login('anon') + cnx = _repo_connect(self.repo, login='anon', password='anon') self.assertEqual(cnx.get_schema(), self.repo.schema) self.assertEqual(cnx.source_defs(), {'system': {'type': 'native', 'uri': 'system', 'use-cwuri-as-url': False}}) - self.restore_connection() # proper way to close cnx + cnx.close() self.assertRaises(ProgrammingError, cnx.get_schema) self.assertRaises(ProgrammingError, cnx.source_defs) def test_db_api(self): - cnx = self.login('anon') + cnx = _repo_connect(self.repo, login='anon', password='anon') self.assertEqual(cnx.rollback(), None) self.assertEqual(cnx.commit(), None) - self.restore_connection() # proper way to close cnx - #self.assertEqual(cnx.close(), None) + cnx.close() self.assertRaises(ProgrammingError, cnx.rollback) self.assertRaises(ProgrammingError, cnx.commit) self.assertRaises(ProgrammingError, cnx.close) def test_api(self): - cnx = self.login('anon') + cnx = _repo_connect(self.repo, login='anon', password='anon') self.assertEqual(cnx.user(None).login, 'anon') self.assertEqual(cnx.describe(1), (u'CWSource', u'system', None)) - self.restore_connection() # proper way to close cnx + cnx.close() self.assertRaises(ProgrammingError, cnx.user, None) self.assertRaises(ProgrammingError, cnx.describe, 1) def test_shared_data_api(self): - cnx = self.login('anon') + cnx = _repo_connect(self.repo, login='anon', password='anon') self.assertEqual(cnx.get_shared_data('data'), None) cnx.set_shared_data('data', 4) self.assertEqual(cnx.get_shared_data('data'), 4) @@ -65,16 +64,17 @@ 
self.assertEqual(cnx.get_shared_data('data'), None) cnx.set_shared_data('data', 4) self.assertEqual(cnx.get_shared_data('data'), 4) - self.restore_connection() # proper way to close cnx + cnx.close() self.assertRaises(ProgrammingError, cnx.check) self.assertRaises(ProgrammingError, cnx.set_shared_data, 'data', 0) self.assertRaises(ProgrammingError, cnx.get_shared_data, 'data') def test_web_compatible_request(self): config = cwconfig.CubicWebNoAppConfiguration() - with tempattr(self.cnx.vreg, 'config', config): - self.cnx.use_web_compatible_requests('http://perdu.com') - req = self.cnx.request() + cnx = _repo_connect(self.repo, login='admin', password='gingkow') + with tempattr(cnx.vreg, 'config', config): + cnx.use_web_compatible_requests('http://perdu.com') + req = cnx.request() self.assertEqual(req.base_url(), 'http://perdu.com') self.assertEqual(req.from_controller(), 'view') self.assertEqual(req.relative_path(), '')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/unittest_repoapi.py Mon Jan 13 13:47:47 2014 +0100 @@ -0,0 +1,88 @@ +# copyright 2013-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see <http://www.gnu.org/licenses/>. +"""unittest for cubicweb.dbapi""" + + +from cubicweb.devtools.testlib import CubicWebTC + +from cubicweb import ProgrammingError +from cubicweb.repoapi import ClientConnection, connect, anonymous_cnx + + +class REPOAPITC(CubicWebTC): + + def test_clt_cnx_basic_usage(self): + """Test that a client connection can be used to access the data base""" + cltcnx = ClientConnection(self.session) + with cltcnx: + # (1) some RQL request + rset = cltcnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + # (2) ORM usage + random_user = rset.get_entity(0, 0) + # (3) Write operation + random_user.cw_set(surname=u'babar') + # (4) commit + cltcnx.commit() + rset = cltcnx.execute('''Any X WHERE X is CWUser, + X surname "babar" + ''') + self.assertTrue(rset) + # prepare test for implicite rollback + random_user = rset.get_entity(0, 0) + random_user.cw_set(surname=u'celestine') + # implicite rollback on exit + rset = self.session.execute('''Any X WHERE X is CWUser, + X surname "babar" + ''') + self.assertTrue(rset) + + def 
test_clt_cnx_life_cycle(self): + """Check that ClientConnection requires explicite open and close + """ + cltcnx = ClientConnection(self.session) + # connection not open yet + with self.assertRaises(ProgrammingError): + cltcnx.execute('Any X WHERE X is CWUser') + # connection open and working + with cltcnx: + cltcnx.execute('Any X WHERE X is CWUser') + # connection closed + with self.assertRaises(ProgrammingError): + cltcnx.execute('Any X WHERE X is CWUser') + + def test_connect(self): + """check that repoapi.connect works and return a usable connection""" + clt_cnx = connect(self.repo, login='admin', password='gingkow') + self.assertEqual('admin', clt_cnx.user.login) + with clt_cnx: + rset = clt_cnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + + def test_anonymous_connect(self): + """check that you can get anonymous connection when the data exist""" + + clt_cnx = anonymous_cnx(self.repo) + self.assertEqual('anon', clt_cnx.user.login) + with clt_cnx: + rset = clt_cnx.execute('Any X WHERE X is CWUser') + self.assertTrue(rset) + + + +
--- a/transaction.py Fri Jan 10 18:31:07 2014 +0100 +++ b/transaction.py Mon Jan 13 13:47:47 2014 +0100 @@ -53,7 +53,17 @@ self.datetime = time self.user_eid = ueid # should be set by the dbapi connection - self.req = None + self.req = None # old style + self.cnx = None # new style + + def _execute(self, *args, **kwargs): + """execute a query using either the req or the cnx""" + if self.req is None: + execute = self.cnx.execute + else: + execute = self.req + return execute(*args, **kwargs) + def __repr__(self): return '<Transaction %s by %s on %s>' % ( @@ -63,8 +73,8 @@ """return the user entity which has done the transaction, none if not found. """ - return self.req.execute('Any X WHERE X eid %(x)s', - {'x': self.user_eid}).get_entity(0, 0) + return self._execute('Any X WHERE X eid %(x)s', + {'x': self.user_eid}).get_entity(0, 0) def actions_list(self, public=True): """return an ordered list of action effectued during that transaction @@ -72,7 +82,11 @@ if public is true, return only 'public' action, eg not ones triggered under the cover by hooks. """ - return self.req.cnx.transaction_actions(self.uuid, public) + if self.req is not None: + cnx = self.req.cnx + else: + cnx = self.cnx + return cnx.transaction_actions(self.uuid, public) class AbstractAction(object):
--- a/web/application.py Fri Jan 10 18:31:07 2014 +0100 +++ b/web/application.py Mon Jan 13 13:47:47 2014 +0100 @@ -35,7 +35,7 @@ ValidationError, Unauthorized, Forbidden, AuthenticationError, NoSelectableObject, BadConnectionId, CW_EVENT_MANAGER) -from cubicweb.dbapi import DBAPISession, anonymous_session +from cubicweb.repoapi import anonymous_cnx from cubicweb.web import LOGGER, component from cubicweb.web import ( StatusResponse, DirectResponse, Redirect, NotFound, LogOut, @@ -50,20 +50,23 @@ @contextmanager def anonymized_request(req): - orig_session = req.session - req.set_session(anonymous_session(req.vreg)) + orig_cnx = req.cnx + anon_clt_cnx = anonymous_cnx(orig_cnx._session.repo) + req.set_cnx(anon_clt_cnx) try: - yield req + with anon_clt_cnx: + yield req finally: - req.set_session(orig_session) + req.set_cnx(orig_cnx) class AbstractSessionManager(component.Component): """manage session data associated to a session identifier""" __regid__ = 'sessionmanager' - def __init__(self, vreg): + def __init__(self, repo): + vreg = repo.vreg self.session_time = vreg.config['http-session-time'] or None - self.authmanager = vreg['components'].select('authmanager', vreg=vreg) + self.authmanager = vreg['components'].select('authmanager', repo=repo) interval = (self.session_time or 0) / 2. if vreg.config.anonymous_user()[0] is not None: self.cleanup_anon_session_time = vreg.config['cleanup-anonymous-session-time'] or 5 * 60 @@ -111,8 +114,7 @@ raise NotImplementedError() def open_session(self, req): - """open and return a new session for the given request. The session is - also bound to the request. + """open and return a new session for the given request. 
raise :exc:`cubicweb.AuthenticationError` if authentication failed (no authentication info found or wrong user/password) @@ -130,8 +132,8 @@ """authenticate user associated to a request and check session validity""" __regid__ = 'authmanager' - def __init__(self, vreg): - self.vreg = vreg + def __init__(self, repo): + self.vreg = repo.vreg def validate_session(self, req, session): """check session validity, reconnecting it to the repository if the @@ -159,9 +161,10 @@ """a session handler using a cookie to store the session identifier""" def __init__(self, appli): + self.repo = appli.repo self.vreg = appli.vreg self.session_manager = self.vreg['components'].select('sessionmanager', - vreg=self.vreg) + repo=self.repo) global SESSION_MANAGER SESSION_MANAGER = self.session_manager if self.vreg.config.mode != 'test': @@ -173,7 +176,7 @@ def reset_session_manager(self): data = self.session_manager.dump_data() self.session_manager = self.vreg['components'].select('sessionmanager', - vreg=self.vreg) + repo=self.repo) self.session_manager.restore_data(data) global SESSION_MANAGER SESSION_MANAGER = self.session_manager @@ -196,66 +199,40 @@ return '__%s_https_session' % self.vreg.config.appid return '__%s_session' % self.vreg.config.appid - def set_session(self, req): - """associate a session to the request + def get_session(self, req): + """Return a session object corresponding to credentials held by the req Session id is searched from : - # form variable - cookie - if no session id is found, open a new session for the connected user - or request authentification as needed + If no session id is found, try opening a new session with credentials + found in the request. - :raise Redirect: if authentication has occurred and succeed + Raises AuthenticationError if no session can be found or created. 
""" cookie = req.get_cookie() sessioncookie = self.session_cookie(req) try: sessionid = str(cookie[sessioncookie].value) - except KeyError: # no session cookie + session = self.get_session_by_id(req, sessionid) + except (KeyError, InvalidSession): # no valid session cookie session = self.open_session(req) - else: - try: - session = self.get_session(req, sessionid) - except InvalidSession: - # try to open a new session, so we get an anonymous session if - # allowed - session = self.open_session(req) - else: - if not session.cnx: - # session exists but is not bound to a connection. We should - # try to authenticate - loginsucceed = False - try: - if self.open_session(req, allow_no_cnx=False): - loginsucceed = True - except Redirect: - # may be raised in open_session (by postlogin mechanism) - # on successful connection - loginsucceed = True - raise - except AuthenticationError: - # authentication failed, continue to use this session - req.set_session(session) - finally: - if loginsucceed: - # session should be replaced by new session created - # in open_session - self.session_manager.close_session(session) + return session - def get_session(self, req, sessionid): + def get_session_by_id(self, req, sessionid): session = self.session_manager.get_session(req, sessionid) session.mtime = time() return session - def open_session(self, req, allow_no_cnx=True): - session = self.session_manager.open_session(req, allow_no_cnx=allow_no_cnx) + def open_session(self, req): + session = self.session_manager.open_session(req) sessioncookie = self.session_cookie(req) secure = req.https and req.base_url().startswith('https://') req.set_cookie(sessioncookie, session.sessionid, maxage=None, secure=secure) if not session.anonymous_session: - self.session_manager.postlogin(req) + self.session_manager.postlogin(req, session) return session def logout(self, req, goto_url): @@ -277,21 +254,20 @@ The http server will call its main entry point ``application.handle_request``. .. 
automethod:: cubicweb.web.application.CubicWebPublisher.main_handle_request + + You have to provide both a repository and web-server config at + initialization. In all in one instance both config will be the same. """ - def __init__(self, config, - session_handler_fact=CookieSessionHandler, - vreg=None): + def __init__(self, repo, config, session_handler_fact=CookieSessionHandler): self.info('starting web instance from %s', config.apphome) - if vreg is None: - vreg = cwvreg.CWRegistryStore(config) - self.vreg = vreg - # connect to the repository and get instance's schema - self.repo = config.repository(vreg) - if not vreg.initialized: + self.repo = repo + self.vreg = repo.vreg + # get instance's schema + if not self.vreg.initialized: config.init_cubes(self.repo.get_cubes()) - vreg.init_properties(self.repo.properties()) - vreg.set_schema(self.repo.get_schema()) + self.vreg.init_properties(self.repo.properties()) + self.vreg.set_schema(self.repo.get_schema()) # set the correct publish method if config['query-log-file']: from threading import Lock @@ -310,12 +286,12 @@ self.url_resolver = self.vreg['components'].select('urlpublisher', vreg=self.vreg) - def connect(self, req): - """return a connection for a logged user object according to existing - sessions (i.e. a new connection may be created or an already existing - one may be reused + def get_session(self, req): + """Return a session object corresponding to credentials held by the req + + May raise AuthenticationError. 
""" - self.session_handler.set_session(req) + return self.session_handler.get_session(req) # publish methods ######################################################### @@ -362,7 +338,24 @@ req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False) content = '' try: - self.connect(req) + try: + session = self.get_session(req) + from cubicweb import repoapi + cnx = repoapi.ClientConnection(session) + req.set_cnx(cnx) + except AuthenticationError: + # Keep the dummy session set at initialisation. + # such session with work to an some extend but raise an + # AuthenticationError on any database access. + import contextlib + @contextlib.contextmanager + def dummy(): + yield + cnx = dummy() + # XXX We want to clean up this approach in the future. But + # several cubes like registration or forgotten password rely on + # this principle. + # DENY https acces for anonymous_user if (req.https and req.session.anonymous_session @@ -373,7 +366,8 @@ # handler try: ### Try to generate the actual request content - content = self.core_handle(req, path) + with cnx: + content = self.core_handle(req, path) # Handle user log-out except LogOut as ex: # When authentification is handled by cookie the code that
--- a/web/request.py Fri Jan 10 18:31:07 2014 +0100 +++ b/web/request.py Mon Jan 13 13:47:47 2014 +0100 @@ -39,6 +39,7 @@ from logilab.common.deprecation import deprecated from logilab.mtconverter import xml_escape +from cubicweb.req import RequestSessionBase from cubicweb.dbapi import DBAPIRequest from cubicweb.uilib import remove_html_tags, js from cubicweb.utils import SizeConstrainedList, HTMLHead, make_uid @@ -82,7 +83,7 @@ -class CubicWebRequestBase(DBAPIRequest): +class _CubicWebRequestBase(RequestSessionBase): """abstract HTTP request, should be extended according to the HTTP backend Immutable attributes that describe the received query and generic configuration """ @@ -94,7 +95,7 @@ :https: boolean, s this a https request :form: Forms value """ - super(CubicWebRequestBase, self).__init__(vreg) + super(_CubicWebRequestBase, self).__init__(vreg) #: (Boolean) Is this an https request. self.https = https #: User interface property (vary with https) (see :ref:`uiprops`) @@ -119,6 +120,19 @@ self.setup_params(form) #: received body self.content = StringIO() + # use header to set default language (may ne overwriten by user one later) + if vreg.config.get('language-negociation', False): + # http negociated language + accepted_languages = self.header_accept_language() + else: + accepted_languages = () + for lang in accepted_languages: + if lang in self.translations: + self.set_language(lang) + break + else: + self.set_default_language(vreg) + # 3. default language #: dictionary that may be used to store request data that has to be #: shared among various components used to publish the request (views, #: controller, application...) 
@@ -169,7 +183,7 @@ if secure: base_url = self.vreg.config.get('https-url') if base_url is None: - base_url = super(CubicWebRequestBase, self).base_url() + base_url = super(_CubicWebRequestBase, self).base_url() return base_url @property @@ -206,31 +220,6 @@ self.set_page_data('rql_varmaker', varmaker) return varmaker - def set_session(self, session, user=None): - """method called by the session handler when the user is authenticated - or an anonymous connection is open - """ - super(CubicWebRequestBase, self).set_session(session, user) - # set request language - vreg = self.vreg - if self.user: - try: - # 1. user specified language - lang = vreg.typed_value('ui.language', - self.user.properties['ui.language']) - self.set_language(lang) - return - except KeyError: - pass - if vreg.config['language-negociation']: - # 2. http negociated language - for lang in self.header_accept_language(): - if lang in self.translations: - self.set_language(lang) - return - # 3. default language - self.set_default_language(vreg) - # input form parameters management ######################################## # common form parameters which should be protected against html values @@ -725,7 +714,13 @@ if '__message' in kwargs: msg = kwargs.pop('__message') kwargs['_cwmsgid'] = self.set_redirect_message(msg) - return super(CubicWebRequestBase, self).build_url(*args, **kwargs) + if not args: + method = 'view' + if (self.from_controller() == 'view' + and not '_restpath' in kwargs): + method = self.relative_path(includeparams=False) or 'view' + args = (method,) + return super(_CubicWebRequestBase, self).build_url(*args, **kwargs) def url(self, includeparams=True): """return currently accessed url""" @@ -987,6 +982,108 @@ return 'text/html' +class DBAPICubicWebRequestBase(_CubicWebRequestBase, DBAPIRequest): + + def set_session(self, session): + """method called by the session handler when the user is authenticated + or an anonymous connection is open + """ + super(CubicWebRequestBase, 
self).set_session(session) + # set request language + user_lang = self.user.properties.get('ui.language') + if user_lang is not None: + lang = self.vreg.typed_value('ui.language', user_lang) + self.set_language(lang) + + + +def _cnx_func(name): + def proxy(req, *args, **kwargs): + return getattr(req.cnx, name)(*args, **kwargs) + return proxy + + +class ConnectionCubicWebRequestBase(_CubicWebRequestBase): + + def __init__(self, vreg, https=False, form=None, headers={}): + """""" + self.cnx = None + self.session = None + self.vreg = vreg + try: + # no vreg or config which doesn't handle translations + self.translations = vreg.config.translations + except AttributeError: + self.translations = {} + super(ConnectionCubicWebRequestBase, self).__init__(vreg, https=https, + form=form, headers=headers) + from cubicweb.dbapi import DBAPISession, _NeedAuthAccessMock + self.session = DBAPISession(None) + self.cnx = self.user = _NeedAuthAccessMock() + #: cache entities built during the request + self._eid_cache = {} + + def set_cnx(self, cnx): + self.cnx = cnx + self.session = cnx._session + self._set_user(cnx.user) + # set user language + user_lang = self.user.properties.get('ui.language') + if user_lang is not None: + lang = self.vreg.typed_value('ui.language', user_lang) + self.set_language(lang) + + + def execute(self, *args, **kwargs): + rset = self.cnx.execute(*args, **kwargs) + rset.req = self + return rset + + def set_default_language(self, vreg): + # XXX copy from dbapi + try: + lang = vreg.property_value('ui.language') + except Exception: # property may not be registered + lang = 'en' + try: + self.set_language(lang) + except KeyError: + # this occurs usually during test execution + self._ = self.__ = unicode + self.pgettext = lambda x, y: unicode(y) + + describe = _cnx_func('describe') + source_defs = _cnx_func('source_defs') + get_shared_data = _cnx_func('get_shared_data') + set_shared_data = _cnx_func('set_shared_data') + + # server-side service call 
################################################# + + def call_service(self, regid, **kwargs): + return self.cnx.call_service(regid, **kwargs) + + # entities cache management ############################################### + + def entity_cache(self, eid): + return self._eid_cache[eid] + + def set_entity_cache(self, entity): + self._eid_cache[entity.eid] = entity + + def cached_entities(self): + return self._eid_cache.values() + + def drop_entity_cache(self, eid=None): + if eid is None: + self._eid_cache = {} + else: + del self._eid_cache[eid] + + + +CubicWebRequestBase = ConnectionCubicWebRequestBase + + ## HTTP-accept parsers / utilies ############################################## def _mimetype_sort_key(accept_info): """accepted mimetypes must be sorted by : @@ -1083,4 +1180,4 @@ } from cubicweb import set_log_methods -set_log_methods(CubicWebRequestBase, LOGGER) +set_log_methods(_CubicWebRequestBase, LOGGER)
--- a/web/test/data/views.py Fri Jan 10 18:31:07 2014 +0100 +++ b/web/test/data/views.py Mon Jan 13 13:47:47 2014 +0100 @@ -16,32 +16,8 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see <http://www.gnu.org/licenses/>. -from cubicweb.web import Redirect -from cubicweb.web.application import CubicWebPublisher from cubicweb.web.views.ajaxcontroller import ajaxfunc -# proof of concept : monkey patch handle method so that if we are in an -# anonymous session and __fblogin is found is req.form, the user with the -# given login is created if necessary and then a session is opened for that -# user -# NOTE: this require "cookie" authentication mode -def auto_login_handle_request(self, req, path): - if (not req.cnx or req.cnx.anonymous_connection) and req.form.get('__fblogin'): - login = password = req.form.pop('__fblogin') - self.repo.register_user(login, password) - req.form['__login'] = login - req.form['__password'] = password - if req.cnx: - req.cnx.close() - req.cnx = None - try: - self.session_handler.set_session(req) - except Redirect: - pass - assert req.user.login == login - return orig_handle(self, req, path) - - def _recursive_replace_stream_by_content(tree): """ Search for streams (i.e. object that have a 'read' method) in a tree (which branches are lists or tuples), and substitute them by their content, @@ -70,6 +46,3 @@ except Exception, ex: import traceback as tb tb.print_exc(ex) - -orig_handle = CubicWebPublisher.main_handle_request -CubicWebPublisher.main_handle_request = auto_login_handle_request
--- a/web/test/unittest_application.py Fri Jan 10 18:31:07 2014 +0100 +++ b/web/test/unittest_application.py Mon Jan 13 13:47:47 2014 +0100 @@ -30,6 +30,8 @@ from cubicweb.web import LogOut, Redirect, INTERNAL_FIELD_VALUE from cubicweb.web.views.basecontrollers import ViewController from cubicweb.web.application import anonymized_request +from cubicweb.dbapi import DBAPISession, _NeedAuthAccessMock +from cubicweb import repoapi class FakeMapping: """emulates a mapping module""" @@ -175,15 +177,6 @@ self.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid) self.commit() - def test_nonregr_publish1(self): - req = self.request(u'CWEType X WHERE X final FALSE, X meta FALSE') - self.app.handle_request(req, 'view') - - def test_nonregr_publish2(self): - req = self.request(u'Any count(N) WHERE N todo_by U, N is Note, U eid %s' - % self.user().eid) - self.app.handle_request(req, 'view') - def test_publish_validation_error(self): req = self.request() user = self.user() @@ -267,9 +260,9 @@ def _test_cleaned(self, kwargs, injected, cleaned): req = self.request(**kwargs) - page = self.app.handle_request(req, 'view') - self.assertFalse(injected in page, (kwargs, injected)) - self.assertTrue(cleaned in page, (kwargs, cleaned)) + page = self.app_handle_request(req, 'view') + self.assertNotIn(injected, page) + self.assertIn(cleaned, page) def test_nonregr_script_kiddies(self): """test against current script injection""" @@ -301,26 +294,14 @@ self.commit() self.assertEqual(vreg.property_value('ui.language'), 'en') - def test_fb_login_concept(self): - """see data/views.py""" - self.set_auth_mode('cookie', 'anon') - self.login('anon') - req = self.request() - origcnx = req.cnx - req.form['__fblogin'] = u'turlututu' - page = self.app.handle_request(req, '') - self.assertFalse(req.cnx is origcnx) - self.assertEqual(req.user.login, 'turlututu') - self.assertTrue('turlututu' in page, page) - req.cnx.close() # avoid warning - # authentication tests 
#################################################### def test_http_auth_no_anon(self): req, origsession = self.init_authentication('http') self.assertAuthFailure(req) - self.assertRaises(AuthenticationError, self.app_handle_request, req, 'login') - self.assertEqual(req.cnx, None) + self.app.handle_request(req, 'login') + self.assertEqual(401, req.status_out) + clear_cache(req, 'get_authorization') authstr = base64.encodestring('%s:%s' % (self.admlogin, self.admpassword)) req.set_request_header('Authorization', 'basic %s' % authstr) self.assertAuthSuccess(req, origsession) @@ -331,12 +312,13 @@ req, origsession = self.init_authentication('cookie') self.assertAuthFailure(req) try: - form = self.app_handle_request(req, 'login') + form = self.app.handle_request(req, 'login') except Redirect as redir: self.fail('anonymous user should get login form') + clear_cache(req, 'get_authorization') self.assertTrue('__login' in form) self.assertTrue('__password' in form) - self.assertEqual(req.cnx, None) + self.assertFalse(req.cnx) # Mock cnx are False req.form['__login'] = self.admlogin req.form['__password'] = self.admpassword self.assertAuthSuccess(req, origsession) @@ -355,7 +337,7 @@ # req.form['__password'] = self.admpassword # self.assertAuthFailure(req) # option allow-email-login set - origsession.login = address + #origsession.login = address self.set_option('allow-email-login', True) req.form['__login'] = address req.form['__password'] = self.admpassword @@ -373,22 +355,27 @@ raw=True) clear_cache(req, 'get_authorization') # reset session as if it was a new incoming request - req.session = req.cnx = None + req.session = DBAPISession(None) + req.user = req.cnx = _NeedAuthAccessMock + def _test_auth_anon(self, req): - self.app.connect(req) - asession = req.session + asession = self.app.get_session(req) + # important otherwise _reset_cookie will not use the right session + req.set_cnx(repoapi.ClientConnection(asession)) self.assertEqual(len(self.open_sessions), 1) 
self.assertEqual(asession.login, 'anon') self.assertTrue(asession.anonymous_session) self._reset_cookie(req) def _test_anon_auth_fail(self, req): - self.assertEqual(len(self.open_sessions), 1) - self.app.connect(req) + self.assertEqual(1, len(self.open_sessions)) + session = self.app.get_session(req) + # important otherwise _reset_cookie will not use the right session + req.set_cnx(repoapi.ClientConnection(session)) self.assertEqual(req.message, 'authentication failure') self.assertEqual(req.session.anonymous_session, True) - self.assertEqual(len(self.open_sessions), 1) + self.assertEqual(1, len(self.open_sessions)) self._reset_cookie(req) def test_http_auth_anon_allowed(self): @@ -413,19 +400,19 @@ req.form['__password'] = self.admpassword self.assertAuthSuccess(req, origsession) self.assertRaises(LogOut, self.app_handle_request, req, 'logout') - self.assertEqual(len(self.open_sessions), 0) + self.assertEqual(0, len(self.open_sessions)) def test_anonymized_request(self): req = self.request() - self.assertEqual(req.session.login, self.admlogin) + self.assertEqual(self.admlogin, req.session.user.login) # admin should see anon + admin - self.assertEqual(len(list(req.find_entities('CWUser'))), 2) + self.assertEqual(2, len(list(req.find_entities('CWUser')))) with anonymized_request(req): - self.assertEqual(req.session.login, 'anon') + self.assertEqual('anon', req.session.login, 'anon') # anon should only see anon user - self.assertEqual(len(list(req.find_entities('CWUser'))), 1) - self.assertEqual(req.session.login, self.admlogin) - self.assertEqual(len(list(req.find_entities('CWUser'))), 2) + self.assertEqual(1, len(list(req.find_entities('CWUser')))) + self.assertEqual(self.admlogin, req.session.login) + self.assertEqual(2, len(list(req.find_entities('CWUser')))) def test_non_regr_optional_first_var(self): req = self.request()
--- a/web/test/unittest_session.py Fri Jan 10 18:31:07 2014 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,35 +0,0 @@ -# -*- coding: iso-8859-1 -*- -"""unit tests for cubicweb.web.application - -:organization: Logilab -:copyright: 2001-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.web import InvalidSession - -class SessionTC(CubicWebTC): - - def test_session_expiration(self): - sm = self.app.session_handler.session_manager - # make is if the web session has been opened by the session manager - sm._sessions[self.cnx.sessionid] = self.websession - sessionid = self.websession.sessionid - self.assertEqual(len(sm._sessions), 1) - self.assertEqual(self.websession.sessionid, self.websession.cnx.sessionid) - # fake the repo session is expiring - self.repo.close(sessionid) - try: - # fake an incoming http query with sessionid in session cookie - # don't use self.request() which try to call req.set_session - req = self.requestcls(self.vreg) - self.assertRaises(InvalidSession, sm.get_session, req, sessionid) - self.assertEqual(len(sm._sessions), 0) - finally: - # avoid error in tearDown by telling this connection is closed... - self.cnx._closed = True - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main()
--- a/web/test/unittest_views_basecontrollers.py Fri Jan 10 18:31:07 2014 +0100 +++ b/web/test/unittest_views_basecontrollers.py Mon Jan 13 13:47:47 2014 +0100 @@ -33,7 +33,7 @@ from cubicweb.uilib import rql_for_eid from cubicweb.web import INTERNAL_FIELD_VALUE, Redirect, RequestError, RemoteCallFailed import cubicweb.server.session -from cubicweb.server.session import Transaction as OldTransaction +from cubicweb.server.session import Connection as OldConnection from cubicweb.entities.authobjs import CWUser from cubicweb.web.views.autoform import get_pending_inserts, get_pending_deletes from cubicweb.web.views.basecontrollers import JSonController, xhtmlize, jsonize @@ -901,15 +901,15 @@ class UndoControllerTC(CubicWebTC): def setUp(self): - class Transaction(OldTransaction): + class Connection(OldConnection): """Force undo feature to be turned on in all case""" undo_actions = property(lambda tx: True, lambda x, y:None) - cubicweb.server.session.Transaction = Transaction + cubicweb.server.session.Connection = Connection super(UndoControllerTC, self).setUp() def tearDown(self): super(UndoControllerTC, self).tearDown() - cubicweb.server.session.Transaction = OldTransaction + cubicweb.server.session.Connection = OldConnection def setup_database(self):
--- a/web/test/unittest_views_basetemplates.py Fri Jan 10 18:31:07 2014 +0100 +++ b/web/test/unittest_views_basetemplates.py Mon Jan 13 13:47:47 2014 +0100 @@ -18,16 +18,15 @@ from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools.htmlparser import XMLValidator +from cubicweb.dbapi import DBAPISession class LogFormTemplateTC(CubicWebTC): def _login_labels(self): valid = self.content_type_validators.get('text/html', XMLValidator)() - req = self.request() - req.cnx.anonymous_connection = True - page = valid.parse_string(self.vreg['views'].main_template(self.request(), 'login')) - req.cnx.anonymous_connection = False + req = self.requestcls(self.vreg, url='login') + page = valid.parse_string(self.vreg['views'].main_template(req, 'login')) return page.find_tag('label') def test_label(self):
--- a/web/views/authentication.py Fri Jan 10 18:31:07 2014 +0100 +++ b/web/views/authentication.py Mon Jan 13 13:47:47 2014 +0100 @@ -105,9 +105,10 @@ class RepositoryAuthenticationManager(AbstractAuthenticationManager): """authenticate user associated to a request and check session validity""" - def __init__(self, vreg): - super(RepositoryAuthenticationManager, self).__init__(vreg) - self.repo = vreg.config.repository(vreg) + def __init__(self, repo): + super(RepositoryAuthenticationManager, self).__init__(repo) + self.repo = repo + vreg = repo.vreg self.log_queries = vreg.config['query-log-file'] self.authinforetrievers = sorted(vreg['webauth'].possible_objects(vreg), key=lambda x: x.order) @@ -138,13 +139,6 @@ # actual user login if login and session.login != login: raise InvalidSession('login mismatch') - try: - # calling cnx.user() check connection validity, raise - # BadConnectionId on failure - user = session.cnx.user(req) - except BadConnectionId: - raise InvalidSession('bad connection id') - return user def authenticate(self, req): """authenticate user using connection information found in the request, @@ -160,28 +154,24 @@ except NoAuthInfo: continue try: - cnx = self._authenticate(login, authinfo) + session = self._authenticate(login, authinfo) except AuthenticationError: retriever.cleanup_authentication_information(req) continue # the next one may succeed for retriever_ in self.authinforetrievers: - retriever_.authenticated(retriever, req, cnx, login, authinfo) - return cnx, login + retriever_.authenticated(retriever, req, session, login, authinfo) + return session, login # false if no authentication info found, eg this is not an # authentication failure if 'login' in locals(): req.set_message(req._('authentication failure')) login, authinfo = self.anoninfo if login: - cnx = self._authenticate(login, authinfo) - cnx.anonymous_connection = True - return cnx, login + session = self._authenticate(login, authinfo) + return session, login raise 
AuthenticationError() def _authenticate(self, login, authinfo): - cnxprops = ConnectionProperties(close=False, log=self.log_queries) - cnx = _repo_connect(self.repo, login, cnxprops=cnxprops, **authinfo) - # decorate connection - cnx.vreg = self.vreg - return cnx + sessionid = self.repo.connect(login, **authinfo) + return self.repo._sessions[sessionid]
--- a/web/views/debug.py Fri Jan 10 18:31:07 2014 +0100 +++ b/web/views/debug.py Mon Jan 13 13:47:47 2014 +0100 @@ -97,7 +97,7 @@ w(u'<h2>%s</h2>' % _('Repository')) w(u'<h3>%s</h3>' % _('resources usage')) w(u'<table>') - stats = repo.stats() + stats = self._cw.call_service('repo_stats') for element in sorted(stats): w(u'<tr><th align="left">%s</th><td>%s %s</td></tr>' % (element, xml_escape(unicode(stats[element])),
--- a/web/views/management.py Fri Jan 10 18:31:07 2014 +0100 +++ b/web/views/management.py Mon Jan 13 13:47:47 2014 +0100 @@ -181,7 +181,7 @@ __select__ = none_rset() & match_user_groups('users', 'managers') def call(self): - stats = self._cw.vreg.config.repository(None).stats() + stats = self._cw.call_service('repo_stats') results = [] for element in stats: results.append(u'%s %s' % (element, stats[element]))
--- a/web/views/sessions.py Fri Jan 10 18:31:07 2014 +0100 +++ b/web/views/sessions.py Mon Jan 13 13:47:47 2014 +0100 @@ -26,6 +26,7 @@ from cubicweb.web import InvalidSession, Redirect from cubicweb.web.application import AbstractSessionManager from cubicweb.dbapi import ProgrammingError, DBAPISession +from cubicweb import repoapi class InMemoryRepositorySessionManager(AbstractSessionManager): @@ -53,72 +54,59 @@ if sessionid not in self._sessions: raise InvalidSession() session = self._sessions[sessionid] - if session.cnx: - try: - user = self.authmanager.validate_session(req, session) - except InvalidSession: - # invalid session - self.close_session(session) - raise - # associate the connection to the current request - req.set_session(session, user) + try: + user = self.authmanager.validate_session(req, session) + except InvalidSession: + self.close_session(session) + raise + if session.closed: + self.close_session(session) + raise InvalidSession() return session - def open_session(self, req, allow_no_cnx=True): + def open_session(self, req): """open and return a new session for the given request. The session is also bound to the request. 
raise :exc:`cubicweb.AuthenticationError` if authentication failed (no authentication info found or wrong user/password) """ - try: - cnx, login = self.authmanager.authenticate(req) - except AuthenticationError: - if allow_no_cnx: - session = DBAPISession(None) - else: - raise - else: - session = DBAPISession(cnx, login) + session, login = self.authmanager.authenticate(req) self._sessions[session.sessionid] = session - # associate the connection to the current request - req.set_session(session) return session - def postlogin(self, req): - """postlogin: the user has been authenticated, redirect to the original - page (index by default) with a welcome message + def postlogin(self, req, session): + """postlogin: the user have been related to a session + + Both req and session are passed to this function because actually + linking the request to the session is not yet done and not the + responsability of this object. """ # Update last connection date # XXX: this should be in a post login hook in the repository, but there # we can't differentiate actual login of automatic session # reopening. Is it actually a problem? 
if 'last_login_time' in req.vreg.schema: - self._update_last_login_time(req) - req.set_message(req._('welcome %s !') % req.user.login) + self._update_last_login_time(session) + req.set_message(req._('welcome %s !') % session.user.login) - def _update_last_login_time(self, req): + def _update_last_login_time(self, session): # XXX should properly detect missing permission / non writeable source # and avoid "except (RepositoryError, Unauthorized)" below try: - req.execute('SET X last_login_time NOW WHERE X eid %(x)s', - {'x' : req.user.eid}) - req.cnx.commit() + cnx = repoapi.ClientConnection(session) + with cnx: + cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s', + {'x' : session.user.eid}) + cnx.commit() except (RepositoryError, Unauthorized): - req.cnx.rollback() - except Exception: - req.cnx.rollback() - raise + pass def close_session(self, session): """close session on logout or on invalid session detected (expired out, corrupted...) """ self.info('closing http session %s' % session.sessionid) - del self._sessions[session.sessionid] - if session.cnx: - try: - session.cnx.close() - except (ProgrammingError, BadConnectionId): # expired on the repository side - pass - session.cnx = None + self._sessions.pop(session.sessionid, None) + if not session.closed: + session.repo.close(session.id)
--- a/wsgi/handler.py Fri Jan 10 18:31:07 2014 +0100 +++ b/wsgi/handler.py Mon Jan 13 13:47:47 2014 +0100 @@ -21,7 +21,7 @@ from itertools import chain, repeat, izip -from cubicweb import AuthenticationError +from cubicweb import cwreg, AuthenticationError from cubicweb.web import DirectResponse from cubicweb.web.application import CubicWebPublisher from cubicweb.wsgi.request import CubicWebWsgiRequest @@ -96,8 +96,8 @@ parameters. """ - def __init__(self, config): - self.appli = CubicWebPublisher(config) + def __init__(self, repo, config): + self.appli = CubicWebPublisher(repo, config) self.config = config self.base_url = self.config['base-url'] self.url_rewriter = self.appli.vreg['components'].select_or_none('urlrewriter')