--- a/cubicweb/__pkginfo__.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/__pkginfo__.py Fri Jan 12 11:02:52 2018 +0100
@@ -27,8 +27,8 @@
modname = distname = "cubicweb"
-numversion = (3, 25, 4)
-version = '.'.join(str(num) for num in numversion)
+numversion = (3, 26, 0)
+version = '.'.join(str(num) for num in numversion) + '.dev0'
description = "a repository of entities / relations for knowledge management"
author = "Logilab"
--- a/cubicweb/_gcdebug.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/_gcdebug.py Fri Jan 12 11:02:52 2018 +0100
@@ -19,6 +19,8 @@
import gc, types, weakref
+from six import PY2
+
from cubicweb.schema import CubicWebRelationSchema, CubicWebEntitySchema
try:
from cubicweb.web.request import _NeedAuthAccessMock
@@ -29,12 +31,16 @@
IGNORE_CLASSES = (
type, tuple, dict, list, set, frozenset, type(len),
- weakref.ref, weakref.WeakKeyDictionary,
+ weakref.ref,
listiterator,
property, classmethod,
types.ModuleType, types.FunctionType, types.MethodType,
types.MemberDescriptorType, types.GetSetDescriptorType,
)
+if PY2:
+ # weakref.WeakKeyDictionary fails isinstance check on Python 3.5.
+ IGNORE_CLASSES += (weakref.WeakKeyDictionary, )
+
if _NeedAuthAccessMock is not None:
IGNORE_CLASSES = IGNORE_CLASSES + (_NeedAuthAccessMock,)
--- a/cubicweb/dataimport/importer.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/dataimport/importer.py Fri Jan 12 11:02:52 2018 +0100
@@ -244,7 +244,7 @@
if not rschema.final:
# .prepare() should drop other cases from the entity dict
assert rschema.inlined
- if not entity_dict[rtype] in extid2eid:
+ if entity_dict[rtype] not in extid2eid:
return False
# entity is ready, replace all relation's extid by eids
for rtype in entity_dict:
@@ -253,6 +253,21 @@
entity_dict[rtype] = extid2eid[entity_dict[rtype]]
return True
+ def why_not_ready(self, extid2eid):
+ """Return some text explaining why this ext entity is not ready.
+ """
+ assert self._schema, 'prepare() method should be called first on %s' % self
+ # as .prepare has been called, we know that .values only contains subject relation *type* as
+ # key (no more (rtype, role) tuple)
+ schema = self._schema
+ entity_dict = self.values
+ for rtype in entity_dict:
+ rschema = schema.rschema(rtype)
+ if not rschema.final:
+ if entity_dict[rtype] not in extid2eid:
+ return u'inlined relation %s is not present (%s)' % (rtype, entity_dict[rtype])
+ raise AssertionError('this external entity seems actually ready for insertion')
+
class ExtEntitiesImporter(object):
"""This class is responsible for importing externals entities, that is instances of
@@ -413,7 +428,8 @@
"missing data?"]
for ext_entities in queue.values():
for ext_entity in ext_entities:
- msgs.append(str(ext_entity))
+ msg = '{}: {}'.format(ext_entity, ext_entity.why_not_ready(self.extid2eid))
+ msgs.append(msg)
map(error, msgs)
if self.raise_on_error:
raise Exception('\n'.join(msgs))
--- a/cubicweb/dataimport/massive_store.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/dataimport/massive_store.py Fri Jan 12 11:02:52 2018 +0100
@@ -186,6 +186,19 @@
def finish(self):
"""Remove temporary tables and columns."""
+ try:
+ self._finish()
+ self._cnx.commit()
+ except Exception:
+ self._cnx.rollback()
+ raise
+ finally:
+ # delete the meta data table
+ self.sql('DROP TABLE IF EXISTS cwmassive_initialized')
+ self.commit()
+
+ def _finish(self):
+ """Remove temporary tables and columns."""
assert not self.slave_mode, 'finish method should only be called by the master store'
self.logger.info("Start cleaning")
# Get all the initialized etypes/rtypes
@@ -227,9 +240,6 @@
self._tmp_data_cleanup(tmp_tablename, rtype, uuid)
# restore all deleted indexes and constraints
self._dbh.restore_indexes_and_constraints()
- # delete the meta data table
- self.sql('DROP TABLE IF EXISTS cwmassive_initialized')
- self.commit()
def _insert_etype_metadata(self, etype, tmp_tablename):
"""Massive insertion of meta data for `etype`, with new entities in `tmp_tablename`.
--- a/cubicweb/dataimport/test/data-massimport/schema.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/dataimport/test/data-massimport/schema.py Fri Jan 12 11:02:52 2018 +0100
@@ -48,7 +48,7 @@
Entity type for timezone of geonames.
See timeZones.txt
"""
- code = String(maxsize=1024, indexed=True)
+ code = String(maxsize=1024, indexed=True, required=True)
gmt = Float()
dst = Float()
raw_offset = Float()
--- a/cubicweb/dataimport/test/test_massive_store.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/dataimport/test/test_massive_store.py Fri Jan 12 11:02:52 2018 +0100
@@ -16,6 +16,8 @@
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Massive store test case"""
+import psycopg2
+
from cubicweb.devtools import testlib, PostgresApptestConfiguration
from cubicweb.devtools import startpgcluster, stoppgcluster
from cubicweb.dataimport import ucsvreader, stores
@@ -113,7 +115,7 @@
def test_massimport_etype_metadata(self):
with self.admin_access.repo_cnx() as cnx:
store = MassiveObjectStore(cnx)
- timezone_eid = store.prepare_insert_entity('TimeZone')
+ timezone_eid = store.prepare_insert_entity('TimeZone', code=u'12')
store.prepare_insert_entity('Location', timezone=timezone_eid)
store.flush()
store.commit()
@@ -264,6 +266,16 @@
store.prepare_insert_entity('Location', name=u'toto')
store.finish()
+ def test_delete_metatable_on_integrity_error(self):
+ with self.admin_access.repo_cnx() as cnx:
+ store = MassiveObjectStore(cnx)
+ store.prepare_insert_entity('TimeZone')
+ store.flush()
+ store.commit()
+ with self.assertRaises(psycopg2.IntegrityError):
+ store.finish()
+ self.assertNotIn('cwmassive_initialized', set(self.get_db_descr(cnx)))
+
if __name__ == '__main__':
import unittest
--- a/cubicweb/dataimport/test/test_pgstore.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/dataimport/test/test_pgstore.py Fri Jan 12 11:02:52 2018 +0100
@@ -66,7 +66,7 @@
# test buffer
def test_create_copyfrom_buffer_tuple(self):
- l = long if PY2 else int
+ l = long if PY2 else int # noqa: E741
data = ((42, l(42), 42.42, u'éléphant', DT.date(666, 1, 13), DT.time(6, 6, 6),
DT.datetime(666, 6, 13, 6, 6, 6)),
(6, l(6), 6.6, u'babar', DT.date(2014, 1, 14), DT.time(4, 2, 1),
--- a/cubicweb/dataimport/test/test_sqlgenstore.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/dataimport/test/test_sqlgenstore.py Fri Jan 12 11:02:52 2018 +0100
@@ -16,8 +16,6 @@
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""SQL object store test case"""
-import itertools
-
from cubicweb.dataimport import ucsvreader
from cubicweb.devtools import testlib, PostgresApptestConfiguration
from cubicweb.devtools import startpgcluster, stoppgcluster
@@ -48,7 +46,7 @@
for code, gmt, dst, raw_offset in ucsvreader(open(self.datapath('timeZones.txt'), 'rb'),
delimiter='\t'):
cnx.create_entity('TimeZone', code=code, gmt=float(gmt),
- dst=float(dst), raw_offset=float(raw_offset))
+ dst=float(dst), raw_offset=float(raw_offset))
timezone_code = dict(cnx.execute('Any C, X WHERE X is TimeZone, X code C'))
cnx.commit()
# Push data
@@ -70,12 +68,12 @@
'alternatenames': infos[3],
'latitude': latitude, 'longitude': longitude,
'feature_class': feature_class,
- 'alternate_country_code':infos[9],
+ 'alternate_country_code': infos[9],
'admin_code_3': infos[12],
'admin_code_4': infos[13],
'population': population, 'elevation': elevation,
'gtopo30': gtopo, 'timezone': timezone_code.get(infos[17]),
- 'cwuri': u'http://sws.geonames.org/%s/' % int(infos[0]),
+ 'cwuri': u'http://sws.geonames.org/%s/' % int(infos[0]),
'geonameid': int(infos[0]),
}
store.prepare_insert_entity('Location', **entity)
@@ -98,7 +96,7 @@
def test_sqlgenstore_etype_metadata(self):
with self.admin_access.repo_cnx() as cnx:
store = SQLGenObjectStore(cnx)
- timezone_eid = store.prepare_insert_entity('TimeZone')
+ timezone_eid = store.prepare_insert_entity('TimeZone', code=u'12')
store.prepare_insert_entity('Location', timezone=timezone_eid)
store.flush()
store.commit()
@@ -120,5 +118,5 @@
if __name__ == '__main__':
- from logilab.common.testlib import unittest_main
- unittest_main()
+ import unittest
+ unittest.main()
--- a/cubicweb/devtools/repotest.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/devtools/repotest.py Fri Jan 12 11:02:52 2018 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -24,10 +24,14 @@
from contextlib import contextmanager
from pprint import pprint
-from logilab.common.testlib import SkipTest
-from cubicweb.devtools.testlib import RepoAccess
from cubicweb.entities.authobjs import user_session_cache_key
+from cubicweb.server import set_debug, debugged
+from cubicweb.server.sources.rql2sql import remove_unused_solutions
+
+from .testlib import RepoAccess, BaseTestCase
+from .fake import FakeRequest
+
def tuplify(mylist):
return [tuple(item) for item in mylist]
@@ -129,67 +133,7 @@
schema._eid_index[rdef.eid] = rdef
-from logilab.common.testlib import TestCase, mock_object
-from logilab.database import get_db_helper
-
-from rql import RQLHelper
-
-from cubicweb.devtools.testlib import BaseTestCase
-from cubicweb.devtools.fake import FakeRepo, FakeConfig, FakeRequest, FakeConnection
-from cubicweb.server import set_debug, debugged
-from cubicweb.server.querier import QuerierHelper
-from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions
-
-class RQLGeneratorTC(BaseTestCase):
- schema = backend = None # set this in concrete class
-
- @classmethod
- def setUpClass(cls):
- if cls.backend is not None:
- try:
- cls.dbhelper = get_db_helper(cls.backend)
- except ImportError as ex:
- raise SkipTest(str(ex))
-
- def setUp(self):
- self.repo = FakeRepo(self.schema, config=FakeConfig(apphome=self.datadir))
- self.repo.system_source = mock_object(dbdriver=self.backend)
- self.rqlhelper = RQLHelper(self.schema,
- special_relations={'eid': 'uid',
- 'has_text': 'fti'},
- backend=self.backend)
- self.qhelper = QuerierHelper(self.repo, self.schema)
- ExecutionPlan._check_permissions = _dummy_check_permissions
- rqlannotation._select_principal = _select_principal
- if self.backend is not None:
- self.o = SQLGenerator(self.schema, self.dbhelper)
-
- def tearDown(self):
- ExecutionPlan._check_permissions = _orig_check_permissions
- rqlannotation._select_principal = _orig_select_principal
-
- def set_debug(self, debug):
- set_debug(debug)
- def debugged(self, debug):
- return debugged(debug)
-
- def _prepare(self, rql):
- #print '******************** prepare', rql
- union = self.rqlhelper.parse(rql)
- #print '********* parsed', union.as_string()
- self.rqlhelper.compute_solutions(union)
- #print '********* solutions', solutions
- self.rqlhelper.simplify(union)
- #print '********* simplified', union.as_string()
- plan = self.qhelper.plan_factory(union, {}, FakeConnection(self.repo))
- plan.preprocess(union)
- for select in union.children:
- select.solutions.sort(key=lambda x: list(x.items()))
- #print '********* ppsolutions', solutions
- return union
-
-
-class BaseQuerierTC(TestCase):
+class BaseQuerierTC(BaseTestCase):
repo = None # set this in concrete class
def setUp(self):
@@ -198,22 +142,11 @@
self.ueid = self.admin_access._user.eid
assert self.ueid != -1
self.repo._type_cache = {} # clear cache
- self.maxeid = self.get_max_eid()
do_monkey_patch()
self._dumb_sessions = []
- def get_max_eid(self):
- with self.admin_access.cnx() as cnx:
- return cnx.execute('Any MAX(X)')[0][0]
-
- def cleanup(self):
- with self.admin_access.cnx() as cnx:
- cnx.execute('DELETE Any X WHERE X eid > %s' % self.maxeid)
- cnx.commit()
-
def tearDown(self):
undo_monkey_patch()
- self.cleanup()
assert self.admin_access._user.eid != -1
def set_debug(self, debug):
@@ -316,8 +249,6 @@
res = DumbOrderedDict(sorted(res.items(), key=lambda x: [list(y.items()) for y in x[1]]))
return res, restricted
-def _dummy_check_permissions(self, rqlst):
- return {(): rqlst.solutions}, set()
from cubicweb.server import rqlannotation
_orig_select_principal = rqlannotation._select_principal
--- a/cubicweb/hooks/synccomputed.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/hooks/synccomputed.py Fri Jan 12 11:02:52 2018 +0100
@@ -18,8 +18,6 @@
"""Hooks for synchronizing computed attributes"""
-from cubicweb import _
-
from collections import defaultdict
from rql import nodes
@@ -33,6 +31,7 @@
recompute twice the same attribute
"""
containercls = dict
+
def add_data(self, computed_attribute, eid=None):
try:
self._container[computed_attribute].add(eid)
@@ -42,7 +41,7 @@
def precommit_event(self):
for computed_attribute_rdef, eids in self.get_data().items():
attr = computed_attribute_rdef.rtype
- formula = computed_attribute_rdef.formula
+ formula = computed_attribute_rdef.formula
select = self.cnx.repo.vreg.rqlhelper.parse(formula).children[0]
xvar = select.get_variable('X')
select.add_selected(xvar, index=0)
@@ -151,7 +150,7 @@
# depending entity types {dep. etype: {computed rdef: dep. etype attributes}}
self.computed_attribute_by_etype_attrs = defaultdict(lambda: defaultdict(set))
# depending relations def {dep. rdef: [computed rdefs]
- self.computed_attribute_by_relation = defaultdict(list) # by rdef
+ self.computed_attribute_by_relation = defaultdict(list) # by rdef
# Walk through all attributes definitions
for rdef in schema.iter_computed_attributes():
self.computed_attribute_by_etype[rdef.subject.type].append(rdef)
@@ -171,7 +170,8 @@
object_etypes = rschema.objects(subject_etype)
for object_etype in object_etypes:
if rschema.final:
- attr_for_computations = self.computed_attribute_by_etype_attrs[subject_etype]
+ attr_for_computations = self.computed_attribute_by_etype_attrs[
+ subject_etype]
attr_for_computations[rdef].add(rschema.type)
else:
depend_on_rdef = rschema.rdefs[subject_etype, object_etype]
@@ -184,7 +184,7 @@
yield type('%sCreatedHook' % etype,
(EntityWithCACreatedHook,),
{'__regid__': regid,
- '__select__': hook.Hook.__select__ & selector,
+ '__select__': hook.Hook.__select__ & selector,
'computed_attributes': computed_attributes})
def generate_relation_change_hooks(self):
@@ -198,11 +198,11 @@
optimized_computed_attributes.append(
(computed_rdef,
_optimize_on(computed_rdef.formula_select, rdef.rtype))
- )
+ )
yield type('%sModifiedHook' % rdef.rtype,
(RelationInvolvedInCAModifiedHook,),
{'__regid__': regid,
- '__select__': hook.Hook.__select__ & selector,
+ '__select__': hook.Hook.__select__ & selector,
'optimized_computed_attributes': optimized_computed_attributes})
def generate_entity_update_hooks(self):
@@ -212,7 +212,7 @@
yield type('%sModifiedHook' % etype,
(AttributeInvolvedInCAModifiedHook,),
{'__regid__': regid,
- '__select__': hook.Hook.__select__ & selector,
+ '__select__': hook.Hook.__select__ & selector,
'attributes_computed_attributes': attributes_computed_attributes})
--- a/cubicweb/pyramid/auth.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/pyramid/auth.py Fri Jan 12 11:02:52 2018 +0100
@@ -121,16 +121,16 @@
return ()
def remember(self, request, principal, **kw):
- try:
- repo = request.registry['cubicweb.repository']
- with repo.internal_cnx() as cnx:
+ repo = request.registry['cubicweb.repository']
+ with repo.internal_cnx() as cnx:
+ try:
cnx.execute(
"SET U last_login_time %(now)s WHERE U eid %(user)s", {
'now': datetime.datetime.now(),
'user': principal})
cnx.commit()
- except:
- log.exception("Failed to update last_login_time")
+ except Exception:
+ log.exception("Failed to update last_login_time")
return ()
def forget(self, request):
@@ -198,7 +198,8 @@
session_prefix + 'secret', 'notsosecret')
persistent_secret = settings.get(
persistent_prefix + 'secret', 'notsosecret')
- if 'notsosecret' in (session_secret, persistent_secret):
+ if ('notsosecret' in (session_secret, persistent_secret)
+ and config.registry['cubicweb.config'].mode != 'test'):
warnings.warn('''
!! SECURITY WARNING !!
--- a/cubicweb/pyramid/config.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/pyramid/config.py Fri Jan 12 11:02:52 2018 +0100
@@ -47,6 +47,11 @@
options = merge_options(ServerConfiguration.options +
BaseWebConfiguration.options)
+ def init_log(self, *args, **kwargs):
+ """Rely on logging configuration in Pyramid's .ini file, do nothing
+ here.
+ """
+
def write_development_ini(self, cubes):
"""Write a 'development.ini' file into apphome."""
template_fpath = path.join(path.dirname(__file__), 'development.ini.tmpl')
--- a/cubicweb/pyramid/core.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/pyramid/core.py Fri Jan 12 11:02:52 2018 +0100
@@ -369,7 +369,7 @@
try:
session = repo_connect(request, repo, eid=login)
request._cw_cached_session = session
- except:
+ except Exception:
log.exception("Failed")
raise
--- a/cubicweb/pyramid/development.ini.tmpl Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/pyramid/development.ini.tmpl Fri Jan 12 11:02:52 2018 +0100
@@ -37,3 +37,47 @@
[server:main]
use = egg:waitress#main
listen = 127.0.0.1:6543 [::1]:6543
+
+###
+# logging configuration
+# https://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html
+###
+
+[loggers]
+keys = root, logilab, cubicweb, cubicweb_%(cubename)s
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = INFO
+handlers = console
+
+[logger_logilab]
+level = WARNING
+handlers = console
+qualname = logilab
+
+[logger_cubicweb]
+level = INFO
+handlers = console
+qualname = cubicweb
+
+[logger_cubicweb_%(cubename)s]
+level = DEBUG
+handlers = console
+qualname = cubicweb_%(cubename)s
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+class = logilab.common.logging_ext.ColorFormatter
+format = %%(asctime)s - (%%(name)s) %%(levelname)s: %%(message)s
+datefmt = %%Y-%%m-%%d %%H:%%M:%%S
--- a/cubicweb/pyramid/session.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/pyramid/session.py Fri Jan 12 11:02:52 2018 +0100
@@ -102,7 +102,7 @@
def newfn(*args, **kw):
try:
return fn(*args, **kw)
- except:
+ except Exception:
logger.exception("Error in %s" % fn.__name__)
return newfn
return wrap
@@ -255,17 +255,18 @@
secret = settings['cubicweb.session.secret']
except KeyError:
secret = 'notsosecret'
- warnings.warn('''
+ if config.registry['cubicweb.config'].mode != 'test':
+ warnings.warn('''
- !! WARNING !! !! WARNING !!
+ !! WARNING !! !! WARNING !!
- The session cookies are signed with a static secret key.
- To put your own secret key, edit your pyramid.ini file
- and set the 'cubicweb.session.secret' key.
+ The session cookies are signed with a static secret key.
+ To put your own secret key, edit your pyramid.ini file
+ and set the 'cubicweb.session.secret' key.
- YOU SHOULD STOP THIS INSTANCE unless your really know what you
- are doing !!
+ YOU SHOULD STOP THIS INSTANCE unless your really know what you
+ are doing !!
- ''')
+ ''')
session_factory = CWSessionFactory(secret)
config.set_session_factory(session_factory)
--- a/cubicweb/req.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/req.py Fri Jan 12 11:02:52 2018 +0100
@@ -213,26 +213,30 @@
>>> users = find('CWGroup', name=u"users").one()
>>> groups = find('CWGroup').entities()
"""
- parts = ['Any X WHERE X is %s' % etype]
+ parts = ['Any X WHERE X is {0}'.format(etype)]
varmaker = rqlvar_maker(defined='X')
eschema = self.vreg.schema.eschema(etype)
for attr, value in kwargs.items():
if isinstance(value, list) or isinstance(value, tuple):
- raise NotImplementedError("List of values are not supported")
+ raise NotImplementedError(
+ '{0}: list of values are not supported'.format(attr))
if hasattr(value, 'eid'):
kwargs[attr] = value.eid
if attr.startswith('reverse_'):
attr = attr[8:]
- assert attr in eschema.objrels, \
- '%s not in %s object relations' % (attr, eschema)
- parts.append(
- '%(varname)s %(attr)s X, '
- '%(varname)s eid %%(reverse_%(attr)s)s'
- % {'attr': attr, 'varname': next(varmaker)})
+ if attr not in eschema.objrels:
+ raise KeyError('{0} not in {1} object relations'.format(attr, eschema))
+ parts.append('{var} {attr} X, {var} eid %(reverse_{attr})s'.format(
+ var=next(varmaker), attr=attr))
else:
- assert attr in eschema.subjrels, \
- '%s not in %s subject relations' % (attr, eschema)
- parts.append('X %(attr)s %%(%(attr)s)s' % {'attr': attr})
+ rel = eschema.subjrels.get(attr)
+ if rel is None:
+ raise KeyError('{0} not in {1} subject relations'.format(attr, eschema))
+ if rel.final:
+ parts.append('X {attr} %({attr})s'.format(attr=attr))
+ else:
+ parts.append('X {attr} {var}, {var} eid %({attr})s'.format(
+ attr=attr, var=next(varmaker)))
rql = ', '.join(parts)
--- a/cubicweb/rqlrewrite.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/rqlrewrite.py Fri Jan 12 11:02:52 2018 +0100
@@ -206,6 +206,19 @@
return stinfo['relations'] - stinfo['rhsrelations']
+def need_exists(node):
+ """Return true if the given node should be wrapped in an `Exists` node.
+
+ This is true when node isn't already an `Exists` or `Not` node, nor a
+ `And`/`Or` of `Exists` or `Not` nodes.
+ """
+ if isinstance(node, (n.Exists, n.Not)):
+ return False
+ if isinstance(node, (n.Or, n.And)):
+ return need_exists(node.children[0]) or need_exists(node.children[1])
+ return True
+
+
class Unsupported(Exception):
"""raised when an rql expression can't be inserted in some rql query
because it create an unresolvable query (eg no solutions found)
@@ -474,7 +487,7 @@
self.existingvars = existing
def _inserted_root(self, new):
- if not isinstance(new, (n.Exists, n.Not)):
+ if need_exists(new):
new = n.Exists(new)
return new
@@ -649,6 +662,10 @@
# the snippet has introduced some ambiguities, we have to resolve them
# "manually"
variantes = self.build_variantes(newsolutions)
+ # if all ambiguities have been generated by variables within a "NOT
+ # EXISTS()" or with type explicitly specified, we've nothing to change
+ if not variantes:
+ return newsolutions
# insert "is" where necessary
varexistsmap = {}
self.removing_ambiguity = True
@@ -680,21 +697,32 @@
variantes = set()
for sol in newsolutions:
variante = []
- for key, newvar in self.rewritten.items():
- variante.append((key, sol[newvar]))
- variantes.add(tuple(variante))
- # rebuild variantes as dict
- variantes = [dict(v) for v in variantes]
- # remove variable which have always the same type
- for key in self.rewritten:
- it = iter(variantes)
- etype = next(it)[key]
- for variante in it:
- if variante[key] != etype:
- break
- else:
- for variante in variantes:
- del variante[key]
+ for key, var_name in self.rewritten.items():
+ var = self.select.defined_vars[var_name]
+ # skip variable which are only in a NOT EXISTS
+ if len(var.stinfo['relations']) == 1 and isinstance(var.scope.parent, n.Not):
+ continue
+ # skip variable whose type is already explicitly specified
+ if var.stinfo['typerel']:
+ continue
+ variante.append((key, sol[var_name]))
+ if variante:
+ variantes.add(tuple(variante))
+
+ if variantes:
+ # rebuild variantes as dict
+ variantes = [dict(v) for v in variantes]
+ # remove variable which have always the same type
+ for key in self.rewritten:
+ it = iter(variantes)
+ etype = next(it)[key]
+ for variante in it:
+ if variante[key] != etype:
+ break
+ else:
+ for variante in variantes:
+ del variante[key]
+
return variantes
def _cleanup_inserted(self, node):
--- a/cubicweb/server/querier.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/server/querier.py Fri Jan 12 11:02:52 2018 +0100
@@ -293,7 +293,7 @@
if rqlexpr.check(cnx, eid):
break
else:
- raise Unauthorized('No read acces on %r with eid %i.' % (var, eid))
+ raise Unauthorized('No read access on %r with eid %i.' % (var, eid))
# mark variables protected by an rql expression
restricted_vars.update(localcheck)
# turn local check into a dict key
@@ -346,11 +346,14 @@
def add_relation_def(self, rdef):
"""add an relation definition to build"""
+ edef, rtype, value = rdef
+ if self.schema[rtype].rule:
+ raise QueryError("'%s' is a computed relation" % rtype)
self.r_defs.add(rdef)
- if not isinstance(rdef[0], int):
- self._r_subj_index.setdefault(rdef[0], []).append(rdef)
- if not isinstance(rdef[2], int):
- self._r_obj_index.setdefault(rdef[2], []).append(rdef)
+ if not isinstance(edef, int):
+ self._r_subj_index.setdefault(edef, []).append(rdef)
+ if not isinstance(value, int):
+ self._r_obj_index.setdefault(value, []).append(rdef)
def substitute_entity_def(self, edef, edefs):
"""substitute an incomplete entity definition by a list of complete
--- a/cubicweb/server/repository.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/server/repository.py Fri Jan 12 11:02:52 2018 +0100
@@ -923,10 +923,7 @@
for subjeid, objeid in eids_subj_obj:
print('ADD relation', subjeid, rtype, objeid)
for subjeid, objeid in eids_subj_obj:
- if rtype in relations_by_rtype:
- relations_by_rtype[rtype].append((subjeid, objeid))
- else:
- relations_by_rtype[rtype] = [(subjeid, objeid)]
+ relations_by_rtype.setdefault(rtype, []).append((subjeid, objeid))
if not activintegrity:
continue
# take care to relation of cardinality '?1', as all eids will
--- a/cubicweb/server/sources/rql2sql.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/server/sources/rql2sql.py Fri Jan 12 11:02:52 2018 +0100
@@ -49,7 +49,7 @@
import threading
-from six import PY2
+from six import PY2, text_type
from six.moves import range
from logilab.database import FunctionDescr, SQL_FUNCTIONS_REGISTRY
@@ -1511,7 +1511,7 @@
return self.keyword_map[value]()
if constant.type == 'Substitute':
_id = value
- if PY2 and isinstance(_id, unicode):
+ if PY2 and isinstance(_id, text_type):
_id = _id.encode()
else:
_id = str(id(constant)).replace('-', '', 1)
--- a/cubicweb/server/sqlutils.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/server/sqlutils.py Fri Jan 12 11:02:52 2018 +0100
@@ -543,13 +543,11 @@
return _limit_size(text, maxsize)
cnx.create_function("TEXT_LIMIT_SIZE", 2, limit_size2)
- from logilab.common.date import strptime
-
def weekday(ustr):
try:
- dt = strptime(ustr, '%Y-%m-%d %H:%M:%S')
- except:
- dt = strptime(ustr, '%Y-%m-%d')
+ dt = datetime.strptime(ustr, '%Y-%m-%d %H:%M:%S')
+ except ValueError:
+ dt = datetime.strptime(ustr, '%Y-%m-%d')
# expect sunday to be 1, saturday 7 while weekday method return 0 for
# monday
return (dt.weekday() + 1) % 7
--- a/cubicweb/server/ssplanner.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/server/ssplanner.py Fri Jan 12 11:02:52 2018 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -32,6 +32,7 @@
_CONSTANT = object()
_FROM_SUBSTEP = object()
+
def _extract_const_attributes(plan, rqlst, to_build):
"""add constant values to entity def, mark variables to be selected
"""
@@ -61,9 +62,10 @@
# create a relation between two newly created variables
plan.add_relation_def((edef, rtype, to_build[rhs.name]))
else:
- to_select.setdefault(edef, []).append( (rtype, rhs, 0) )
+ to_select.setdefault(edef, []).append((rtype, rhs, 0))
return to_select
+
def _extract_eid_consts(plan, rqlst):
"""return a dict mapping rqlst variable object to their eid if specified in
the syntax tree
@@ -78,10 +80,10 @@
for rel in rqlst.where.get_nodes(Relation):
# only care for 'eid' relations ...
if (rel.r_type == 'eid'
- # ... that are not part of a NOT clause ...
- and not rel.neged(strict=True)
- # ... and where eid is specified by '=' operator.
- and rel.children[1].operator == '='):
+ # ... that are not part of a NOT clause ...
+ and not rel.neged(strict=True)
+ # ... and where eid is specified by '=' operator.
+ and rel.children[1].operator == '='):
lhs, rhs = rel.get_variable_parts()
if isinstance(rhs, Constant):
eid = int(rhs.eval(plan.args))
@@ -95,6 +97,7 @@
eidconsts[lhs.variable] = eid
return eidconsts
+
def _build_substep_query(select, origrqlst):
"""Finalize substep select query that should be executed to get proper
selection of stuff to insert/update.
@@ -119,6 +122,7 @@
return select
return None
+
class SSPlanner(object):
"""SingleSourcePlanner: build execution plan for rql queries
@@ -160,7 +164,7 @@
# add constant values to entity def, mark variables to be selected
to_select = _extract_const_attributes(plan, rqlst, to_build)
# add necessary steps to add relations and update attributes
- step = InsertStep(plan) # insert each entity and its relations
+ step = InsertStep(plan) # insert each entity and its relations
step.children += self._compute_relation_steps(plan, rqlst, to_select)
return (step,)
@@ -200,7 +204,10 @@
step.children += self._sel_variable_step(plan, rqlst, etype, var)
steps.append(step)
for relation in rqlst.main_relations:
- step = DeleteRelationsStep(plan, relation.r_type)
+ rtype = relation.r_type
+ if self.schema[rtype].rule:
+ raise QueryError("'%s' is a computed relation" % rtype)
+ step = DeleteRelationsStep(plan, rtype)
step.children += self._sel_relation_steps(plan, rqlst, relation)
steps.append(step)
return steps
@@ -237,9 +244,8 @@
getrschema = self.schema.rschema
select = Select() # potential substep query
selectedidx = {} # local state
- attributes = set() # edited attributes
updatedefs = [] # definition of update attributes/relations
- selidx = residx = 0 # substep selection / resulting rset indexes
+ selidx = residx = 0 # substep selection / resulting rset indexes
# search for eid const in the WHERE clause
eidconsts = _extract_eid_consts(plan, rqlst)
# build `updatedefs` describing things to update and add necessary
@@ -250,7 +256,7 @@
% relation.r_type)
lhs, rhs = relation.get_variable_parts()
lhskey = lhs.as_string()
- if not lhskey in selectedidx:
+ if lhskey not in selectedidx:
if lhs.variable in eidconsts:
eid = eidconsts[lhs.variable]
lhsinfo = (_CONSTANT, eid, residx)
@@ -263,7 +269,7 @@
else:
lhsinfo = selectedidx[lhskey][:-1] + (None,)
rhskey = rhs.as_string()
- if not rhskey in selectedidx:
+ if rhskey not in selectedidx:
if isinstance(rhs, Constant):
rhsinfo = (_CONSTANT, rhs.eval(plan.args), residx)
elif getattr(rhs, 'variable', None) in eidconsts:
@@ -278,7 +284,7 @@
else:
rhsinfo = selectedidx[rhskey][:-1] + (None,)
rschema = getrschema(relation.r_type)
- updatedefs.append( (lhsinfo, rhsinfo, rschema) )
+ updatedefs.append((lhsinfo, rhsinfo, rschema))
# the update step
step = UpdateStep(plan, updatedefs)
# when necessary add substep to fetch yet unknown values
@@ -362,7 +368,6 @@
# get results for query
source = cnx.repo.system_source
result = source.syntax_tree_search(cnx, union, args, cachekey)
- #print 'ONEFETCH RESULT %s' % (result)
return result
def mytest_repr(self):
@@ -416,10 +421,10 @@
if rorder == InsertRelationsStep.FINAL:
edef.edited_attribute(rtype, value)
elif rorder == InsertRelationsStep.RELATION:
- self.plan.add_relation_def( (edef, rtype, value) )
+ self.plan.add_relation_def((edef, rtype, value))
edef.querier_pending_relations[(rtype, 'subject')] = value
else:
- self.plan.add_relation_def( (value, rtype, edef) )
+ self.plan.add_relation_def((value, rtype, edef))
edef.querier_pending_relations[(rtype, 'object')] = value
edefs.append(edef)
self.plan.substitute_entity_def(base_edef, edefs)
@@ -455,6 +460,7 @@
cnx.repo.glob_delete_entities(cnx, todelete)
return results
+
class DeleteRelationsStep(Step):
"""step consisting in deleting relations"""
@@ -493,6 +499,9 @@
for i, row in enumerate(result):
newrow = []
for (lhsinfo, rhsinfo, rschema) in self.updatedefs:
+ if rschema.rule:
+ raise QueryError("'%s' is a computed relation"
+ % rschema.type)
lhsval = _handle_relterm(lhsinfo, row, newrow)
rhsval = _handle_relterm(rhsinfo, row, newrow)
if rschema.final or rschema.inlined:
@@ -516,10 +525,11 @@
repo.glob_update_entity(cnx, edited)
return result
+
def _handle_relterm(info, row, newrow):
if info[0] is _CONSTANT:
val = info[1]
- else: # _FROM_SUBSTEP
+ else: # _FROM_SUBSTEP
val = row[info[1]]
if info[-1] is not None:
newrow.append(val)
--- a/cubicweb/server/test/unittest_ldapsource.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/server/test/unittest_ldapsource.py Fri Jan 12 11:02:52 2018 +0100
@@ -105,10 +105,7 @@
sys.stdout.write(stdout)
sys.stderr.write(stderr)
config.info('DONE')
- try:
- shutil.rmtree(cls._tmpdir)
- except:
- pass
+ shutil.rmtree(cls._tmpdir, ignore_errors=True)
def ldapsource(cnx):
--- a/cubicweb/server/test/unittest_migractions.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/server/test/unittest_migractions.py Fri Jan 12 11:02:52 2018 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2017 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -22,9 +22,7 @@
import sys
from datetime import date
from contextlib import contextmanager
-import tempfile
-from logilab.common.testlib import unittest_main, Tags, tag
from logilab.common import tempattr
from yams.constraints import UniqueConstraint
@@ -71,8 +69,6 @@
configcls = MigrationConfig
- tags = CubicWebTC.tags | Tags(('server', 'migration', 'migractions'))
-
def _init_repo(self):
super(MigrationTC, self)._init_repo()
# we have to read schema from the database to get eid for schema entities
@@ -114,15 +110,18 @@
interactive=False)
def table_sql(self, mh, tablename):
- result = mh.sqlexec("SELECT table_name FROM information_schema.tables WHERE LOWER(table_name)=%(table)s",
- {'table': tablename.lower()})
+ result = mh.sqlexec(
+ "SELECT table_name FROM information_schema.tables WHERE LOWER(table_name)=%(table)s",
+ {'table': tablename.lower()})
if result:
return result[0][0]
- return None # no such table
+ return None # no such table
def table_schema(self, mh, tablename):
- result = mh.sqlexec("SELECT column_name, data_type, character_maximum_length FROM information_schema.columns "
- "WHERE LOWER(table_name) = %(table)s", {'table': tablename.lower()})
+ result = mh.sqlexec(
+ "SELECT column_name, data_type, character_maximum_length "
+ "FROM information_schema.columns "
+ "WHERE LOWER(table_name) = %(table)s", {'table': tablename.lower()})
assert result, 'no table %s' % tablename
return dict((x[0], (x[1], x[2])) for x in result)
@@ -181,16 +180,9 @@
whateverorder = migrschema['whatever'].rdef('Note', 'Int').order
for k, v in orderdict.items():
if v >= whateverorder:
- orderdict[k] = v+1
+ orderdict[k] = v + 1
orderdict['whatever'] = whateverorder
self.assertDictEqual(orderdict, orderdict2)
- #self.assertEqual([r.type for r in self.schema['Note'].ordered_relations()],
- # ['modification_date', 'creation_date', 'owned_by',
- # 'eid', 'ecrit_par', 'inline1', 'date', 'type',
- # 'whatever', 'date', 'in_basket'])
- # NB: commit instead of rollback make following test fail with py2.5
- # this sounds like a pysqlite/2.5 bug (the same eid is affected to
- # two different entities)
def test_add_attribute_varchar(self):
with self.mh() as (cnx, mh):
@@ -236,7 +228,7 @@
self.assertEqual(self.schema['mydate'].objects(), ('Date', ))
testdate = date(2005, 12, 13)
eid1 = mh.rqlexec('INSERT Note N')[0][0]
- eid2 = mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0]
+ eid2 = mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate': testdate})[0][0]
d1 = mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0]
d2 = mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0]
d3 = mh.rqlexec('Any D WHERE X eid %(x)s, X oldstyledefaultdate D', {'x': eid1})[0][0]
@@ -288,8 +280,8 @@
def test_workflow_actions(self):
with self.mh() as (cnx, mh):
- wf = mh.cmd_add_workflow(u'foo', ('Personne', 'Email'),
- ensure_workflowable=False)
+ mh.cmd_add_workflow(u'foo', ('Personne', 'Email'),
+ ensure_workflowable=False)
for etype in ('Personne', 'Email'):
s1 = mh.rqlexec('Any N WHERE WF workflow_of ET, ET name "%s", WF name N' %
etype)[0][0]
@@ -309,18 +301,19 @@
self.assertIn('filed_under2', self.schema)
self.assertTrue(cnx.execute('CWRType X WHERE X name "filed_under2"'))
self.assertEqual(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()),
- ['created_by', 'creation_date', 'cw_source', 'cwuri',
- 'description', 'description_format',
- 'eid',
- 'filed_under2', 'has_text',
- 'identity', 'in_basket', 'inlined_rel', 'is', 'is_instance_of',
- 'modification_date', 'name', 'owned_by'])
+ ['created_by', 'creation_date', 'cw_source', 'cwuri',
+ 'description', 'description_format',
+ 'eid',
+ 'filed_under2', 'has_text',
+ 'identity', 'in_basket', 'inlined_rel', 'is', 'is_instance_of',
+ 'modification_date', 'name', 'owned_by'])
self.assertCountEqual([str(rs) for rs in self.schema['Folder2'].object_relations()],
['filed_under2', 'identity', 'inlined_rel'])
# Old will be missing as it has been renamed into 'New' in the migrated
# schema while New hasn't been added here.
self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()),
- sorted(str(e) for e in self.schema.entities() if not e.final and e != 'Old'))
+ sorted(str(e) for e in self.schema.entities()
+ if not e.final and e != 'Old'))
self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',))
eschema = self.schema.eschema('Folder2')
for cstr in eschema.rdef('name').constraints:
@@ -342,7 +335,7 @@
self.assertEqual(rdef.scale, 10)
self.assertEqual(rdef.precision, 18)
fields = self.table_schema(mh, '%sLocation' % SQL_PREFIX)
- self.assertEqual(fields['%snum' % SQL_PREFIX], ('numeric', None)) # XXX
+ self.assertEqual(fields['%snum' % SQL_PREFIX], ('numeric', None)) # XXX
finally:
mh.cmd_drop_cube('fakecustomtype')
mh.drop_entity_type('Numeric')
@@ -357,7 +350,6 @@
wf.add_transition(u'redoit', done, todo)
wf.add_transition(u'markasdone', todo, done)
cnx.commit()
- eschema = self.schema.eschema('Folder2')
mh.cmd_drop_entity_type('Folder2')
self.assertNotIn('Folder2', self.schema)
self.assertFalse(cnx.execute('CWEType X WHERE X name "Folder2"'))
@@ -407,8 +399,9 @@
('Personne',))
self.assertEqual(self.schema['concerne2'].objects(),
('Affaire', ))
- self.assertEqual(self.schema['concerne2'].rdef('Personne', 'Affaire').cardinality,
- '1*')
+ self.assertEqual(
+ self.schema['concerne2'].rdef('Personne', 'Affaire').cardinality,
+ '1*')
mh.cmd_add_relation_definition('Personne', 'concerne2', 'Note')
self.assertEqual(sorted(self.schema['concerne2'].objects()), ['Affaire', 'Note'])
mh.create_entity('Personne', nom=u'tot')
@@ -423,38 +416,38 @@
def test_drop_relation_definition_existant_rtype(self):
with self.mh() as (cnx, mh):
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire', 'Personne'])
+ ['Affaire', 'Personne'])
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
+ ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
mh.cmd_drop_relation_definition('Personne', 'concerne', 'Affaire')
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire'])
+ ['Affaire'])
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Division', 'Note', 'Societe', 'SubDivision'])
+ ['Division', 'Note', 'Societe', 'SubDivision'])
mh.cmd_add_relation_definition('Personne', 'concerne', 'Affaire')
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire', 'Personne'])
+ ['Affaire', 'Personne'])
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
+ ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
# trick: overwrite self.maxeid to avoid deletion of just reintroduced types
self.maxeid = cnx.execute('Any MAX(X)')[0][0]
def test_drop_relation_definition_with_specialization(self):
with self.mh() as (cnx, mh):
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire', 'Personne'])
+ ['Affaire', 'Personne'])
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
+ ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
mh.cmd_drop_relation_definition('Affaire', 'concerne', 'Societe')
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire', 'Personne'])
+ ['Affaire', 'Personne'])
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Affaire', 'Note'])
+ ['Affaire', 'Note'])
mh.cmd_add_relation_definition('Affaire', 'concerne', 'Societe')
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire', 'Personne'])
+ ['Affaire', 'Personne'])
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
+ ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
# trick: overwrite self.maxeid to avoid deletion of just reintroduced types
self.maxeid = cnx.execute('Any MAX(X)')[0][0]
@@ -510,7 +503,6 @@
newconstraints = rdef.constraints
self.assertEqual(len(newconstraints), 0)
- @tag('longrun')
def test_sync_schema_props_perms(self):
with self.mh() as (cnx, mh):
nbrqlexpr_start = cnx.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0]
@@ -550,8 +542,8 @@
# new rql expr to add note entity
eexpr = self._erqlexpr_entity(cnx, 'add', 'Note')
self.assertEqual(eexpr.expression,
- 'X ecrit_part PE, U in_group G, '
- 'PE require_permission P, P name "add_note", P require_group G')
+ 'X ecrit_part PE, U in_group G, '
+ 'PE require_permission P, P name "add_note", P require_group G')
self.assertEqual([et.name for et in eexpr.reverse_add_permission], ['Note'])
self.assertEqual(eexpr.reverse_read_permission, ())
self.assertEqual(eexpr.reverse_delete_permission, ())
@@ -562,9 +554,10 @@
# new rql expr to add ecrit_par relation
rexpr = self._rrqlexpr_entity(cnx, 'add', 'ecrit_par')
self.assertEqual(rexpr.expression,
- 'O require_permission P, P name "add_note", '
- 'U in_group G, P require_group G')
- self.assertEqual([rdef.rtype.name for rdef in rexpr.reverse_add_permission], ['ecrit_par'])
+ 'O require_permission P, P name "add_note", '
+ 'U in_group G, P require_group G')
+ self.assertEqual([rdef.rtype.name for rdef in rexpr.reverse_add_permission],
+ ['ecrit_par'])
self.assertEqual(rexpr.reverse_read_permission, ())
self.assertEqual(rexpr.reverse_delete_permission, ())
# no more rqlexpr to delete and add travaille relation
@@ -594,14 +587,16 @@
# * 2 implicit new for attributes (Note.para, Person.test)
# remaining orphan rql expr which should be deleted at commit (composite relation)
# unattached expressions -> pending deletion on commit
- self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "ERQLExpression",'
- 'NOT ET1 read_permission X, NOT ET2 add_permission X, '
- 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0],
- 7)
- self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "RRQLExpression",'
- 'NOT ET1 read_permission X, NOT ET2 add_permission X, '
- 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0],
- 2)
+ self.assertEqual(
+ cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "ERQLExpression",'
+ 'NOT ET1 read_permission X, NOT ET2 add_permission X, '
+ 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0],
+ 7)
+ self.assertEqual(
+ cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "RRQLExpression",'
+ 'NOT ET1 read_permission X, NOT ET2 add_permission X, '
+ 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0],
+ 2)
# finally
self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0],
nbrqlexpr_start + 1 + 2 + 2 + 2)
@@ -609,8 +604,9 @@
# unique_together test
self.assertEqual(len(self.schema.eschema('Personne')._unique_together), 1)
self.assertCountEqual(self.schema.eschema('Personne')._unique_together[0],
- ('nom', 'prenom', 'datenaiss'))
- rset = cnx.execute('Any C WHERE C is CWUniqueTogetherConstraint, C constraint_of ET, ET name "Personne"')
+ ('nom', 'prenom', 'datenaiss'))
+ rset = cnx.execute('Any C WHERE C is CWUniqueTogetherConstraint, '
+ 'C constraint_of ET, ET name "Personne"')
self.assertEqual(len(rset), 1)
relations = [r.name for r in rset.get_entity(0, 0).relations]
self.assertCountEqual(relations, ('nom', 'prenom', 'datenaiss'))
@@ -632,8 +628,9 @@
return rset.get_entity(0, 0)
def _rrqlexpr_rset(self, cnx, action, ertype):
- rql = 'RQLExpression X WHERE RT is CWRType, RDEF %s_permission X, RT name %%(name)s, RDEF relation_type RT' % action
- return cnx.execute(rql, {'name': ertype})
+ return cnx.execute('RQLExpression X WHERE RT is CWRType, RDEF %s_permission X, '
+ 'RT name %%(name)s, RDEF relation_type RT' % action,
+ {'name': ertype})
def _rrqlexpr_entity(self, cnx, action, ertype):
rset = self._rrqlexpr_rset(cnx, action, ertype)
@@ -653,7 +650,6 @@
finally:
mh.cmd_set_size_constraint('CWEType', 'description', None)
- @tag('longrun')
def test_add_drop_cube_and_deps(self):
with self.mh() as (cnx, mh):
schema = self.repo.schema
@@ -672,15 +668,21 @@
'sender', 'in_thread', 'reply_to', 'data_format'):
self.assertNotIn(ertype, schema)
self.assertEqual(sorted(schema['see_also'].rdefs),
- sorted([('Folder', 'Folder'),
- ('Bookmark', 'Bookmark'),
- ('Bookmark', 'Note'),
- ('Note', 'Note'),
- ('Note', 'Bookmark')]))
- self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'Folder', 'Note'])
- self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'Folder', 'Note'])
- self.assertEqual(cnx.execute('Any X WHERE X pkey "system.version.fakeemail"').rowcount, 0)
- self.assertEqual(cnx.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0)
+ sorted([('Folder', 'Folder'),
+ ('Bookmark', 'Bookmark'),
+ ('Bookmark', 'Note'),
+ ('Note', 'Note'),
+ ('Note', 'Bookmark')]))
+ self.assertEqual(sorted(schema['see_also'].subjects()),
+ ['Bookmark', 'Folder', 'Note'])
+ self.assertEqual(sorted(schema['see_also'].objects()),
+ ['Bookmark', 'Folder', 'Note'])
+ self.assertEqual(
+ cnx.execute('Any X WHERE X pkey "system.version.fakeemail"').rowcount,
+ 0)
+ self.assertEqual(
+ cnx.execute('Any X WHERE X pkey "system.version.file"').rowcount,
+ 0)
finally:
mh.cmd_add_cube('fakeemail')
self.assertIn('fakeemail', self.config.cubes())
@@ -696,23 +698,22 @@
('Bookmark', 'Note'),
('Note', 'Note'),
('Note', 'Bookmark')]))
- self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'EmailThread', 'Folder', 'Note'])
- self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note'])
+ self.assertEqual(sorted(schema['see_also'].subjects()),
+ ['Bookmark', 'EmailThread', 'Folder', 'Note'])
+ self.assertEqual(sorted(schema['see_also'].objects()),
+ ['Bookmark', 'EmailThread', 'Folder', 'Note'])
from cubes.fakeemail.__pkginfo__ import version as email_version
from cubes.file.__pkginfo__ import version as file_version
- self.assertEqual(cnx.execute('Any V WHERE X value V, X pkey "system.version.fakeemail"')[0][0],
- email_version)
- self.assertEqual(cnx.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0],
- file_version)
- # trick: overwrite self.maxeid to avoid deletion of just reintroduced
- # types (and their associated tables!)
- self.maxeid = cnx.execute('Any MAX(X)')[0][0]
+ self.assertEqual(
+ cnx.execute('Any V WHERE X value V, X pkey "system.version.fakeemail"')[0][0],
+ email_version)
+ self.assertEqual(
+ cnx.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0],
+ file_version)
# why this commit is necessary is unclear to me (though without it
# next test may fail complaining of missing tables
cnx.commit()
-
- @tag('longrun')
def test_add_drop_cube_no_deps(self):
with self.mh() as (cnx, mh):
cubes = set(self.config.cubes())
@@ -728,9 +729,6 @@
finally:
mh.cmd_add_cube('fakeemail')
self.assertIn('fakeemail', self.config.cubes())
- # trick: overwrite self.maxeid to avoid deletion of just reintroduced
- # types (and their associated tables!)
- self.maxeid = cnx.execute('Any MAX(X)')[0][0] # XXXXXXX KILL KENNY
# why this commit is necessary is unclear to me (though without it
# next test may fail complaining of missing tables
cnx.commit()
@@ -741,7 +739,6 @@
mh.cmd_drop_cube('file')
self.assertEqual(str(cm.exception), "can't remove cube file, used as a dependency")
- @tag('longrun')
def test_introduce_base_class(self):
with self.mh() as (cnx, mh):
mh.cmd_add_entity_type('Para')
@@ -750,20 +747,21 @@
self.assertEqual(self.schema['Note'].specializes().type, 'Para')
mh.cmd_add_entity_type('Text')
self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()),
- ['Note', 'Text'])
+ ['Note', 'Text'])
self.assertEqual(self.schema['Text'].specializes().type, 'Para')
# test columns have been actually added
- text = cnx.execute('INSERT Text X: X para "hip", X summary "hop", X newattr "momo"').get_entity(0, 0)
- note = cnx.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo", X unique_id "x"').get_entity(0, 0)
- aff = cnx.execute('INSERT Affaire X').get_entity(0, 0)
- self.assertTrue(cnx.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': text.eid, 'y': aff.eid}))
+ text = cnx.create_entity('Text', para=u"hip", summary=u"hop", newattr=u"momo")
+ note = cnx.create_entity('Note', para=u"hip", shortpara=u"hop",
+ newattr=u"momo", unique_id=u"x")
+ aff = cnx.create_entity('Affaire')
self.assertTrue(cnx.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': note.eid, 'y': aff.eid}))
+ {'x': text.eid, 'y': aff.eid}))
+ self.assertTrue(cnx.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
+ {'x': note.eid, 'y': aff.eid}))
self.assertTrue(cnx.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': text.eid, 'y': aff.eid}))
+ {'x': text.eid, 'y': aff.eid}))
self.assertTrue(cnx.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': note.eid, 'y': aff.eid}))
+ {'x': note.eid, 'y': aff.eid}))
# XXX remove specializes by ourselves, else tearDown fails when removing
# Para because of Note inheritance. This could be fixed by putting the
# MemSchemaCWETypeDel(session, name) operation in the
@@ -800,7 +798,7 @@
def test_drop_required_inlined_relation(self):
with self.mh() as (cnx, mh):
bob = mh.cmd_create_entity('Personne', nom=u'bob')
- note = mh.cmd_create_entity('Note', ecrit_par=bob)
+ mh.cmd_create_entity('Note', ecrit_par=bob)
mh.commit()
rdef = mh.fs_schema.rschema('ecrit_par').rdefs[('Note', 'Personne')]
with tempattr(rdef, 'cardinality', '1*'):
@@ -811,7 +809,7 @@
def test_drop_inlined_rdef_delete_data(self):
with self.mh() as (cnx, mh):
- note = mh.cmd_create_entity('Note', ecrit_par=cnx.user.eid)
+ mh.cmd_create_entity('Note', ecrit_par=cnx.user.eid)
mh.commit()
mh.drop_relation_definition('Note', 'ecrit_par', 'CWUser')
self.assertFalse(mh.sqlexec('SELECT * FROM cw_Note WHERE cw_ecrit_par IS NOT NULL'))
@@ -1016,4 +1014,5 @@
if __name__ == '__main__':
- unittest_main()
+ import unittest
+ unittest.main()
--- a/cubicweb/server/test/unittest_querier.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/server/test/unittest_querier.py Fri Jan 12 11:02:52 2018 +0100
@@ -1,5 +1,5 @@
# -*- coding: iso-8859-1 -*-
-# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -19,6 +19,7 @@
"""unit tests for modules cubicweb.server.querier and cubicweb.server.ssplanner
"""
+from contextlib import contextmanager
from datetime import date, datetime, timedelta, tzinfo
import unittest
@@ -112,13 +113,6 @@
setUpClass = classmethod(setUpClass)
tearDownClass = classmethod(tearDownClass)
- def get_max_eid(self):
- # no need for cleanup here
- return None
- def cleanup(self):
- # no need for cleanup here
- pass
-
def test_preprocess_1(self):
with self.admin_access.cnx() as cnx:
reid = cnx.execute('Any X WHERE X is CWRType, X name "owned_by"')[0][0]
@@ -316,6 +310,17 @@
setUpClass = classmethod(setUpClass)
tearDownClass = classmethod(tearDownClass)
+ def setUp(self):
+ super(QuerierTC, self).setUp()
+ with self.admin_access.cnx() as cnx:
+ self.maxeid = cnx.execute('Any MAX(X)')[0][0]
+
+ def tearDown(self):
+ super(QuerierTC, self).tearDown()
+ with self.admin_access.cnx() as cnx:
+ cnx.execute('DELETE Any X WHERE X eid > %s' % self.maxeid)
+ cnx.commit()
+
def test_unknown_eid(self):
# should return an empty result set
self.assertFalse(self.qexecute('Any X WHERE X eid 99999999'))
@@ -1673,5 +1678,33 @@
[[a1.eid]],
cnx.execute('Any A ORDERBY A WHERE U use_email A, U login "admin"').rows)
+ def test_computed_relation_in_write_queries(self):
+ """Computed relations are not allowed in main part of write queries."""
+ @contextmanager
+ def check(cnx):
+ with self.assertRaises(QueryError) as cm:
+ yield
+ self.assertIn("'user_login' is a computed relation",
+ str(cm.exception))
+ cnx.rollback()
+
+ with self.admin_access.cnx() as cnx:
+ person = cnx.create_entity('Personne', nom=u'p')
+ cnx.commit()
+ # create
+ with check(cnx):
+ cnx.execute('INSERT CWUser X: X login "user", X user_login P'
+ ' WHERE P is Personne, P nom "p"')
+ # update
+ bob = self.create_user(cnx, u'bob')
+ with check(cnx):
+ cnx.execute('SET U user_login P WHERE U login "bob", P nom "p"')
+ # delete
+ person.cw_set(login_user=bob)
+ cnx.commit()
+ with check(cnx):
+ cnx.execute('DELETE U user_login P WHERE U login "bob"')
+
+
if __name__ == '__main__':
unittest.main()
--- a/cubicweb/server/test/unittest_rql2sql.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/server/test/unittest_rql2sql.py Fri Jan 12 11:02:52 2018 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -23,30 +23,20 @@
from logilab import database as db
from logilab.common.testlib import mock_object
-from logilab.common.decorators import monkeypatch
from rql import BadRQLQuery
+from rql import RQLHelper
from rql.utils import register_function, FunctionDescr
from cubicweb import devtools
-from cubicweb.devtools.repotest import RQLGeneratorTC
-from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions
-
-
-def setUpModule():
- """Monkey-patch the SQL generator to ensure solutions order is predictable."""
- global orig_solutions_sql
- orig_solutions_sql = SQLGenerator._solutions_sql
+from cubicweb.devtools.fake import FakeRepo, FakeConfig, FakeConnection
+from cubicweb.devtools.testlib import BaseTestCase
+from cubicweb.server import rqlannotation
+from cubicweb.server.querier import QuerierHelper, ExecutionPlan
+from cubicweb.server.sources import rql2sql
- @monkeypatch
- def _solutions_sql(self, select, solutions, distinct, needalias):
- return orig_solutions_sql(self, select, sorted(solutions), distinct, needalias)
-
-
-def tearDownModule():
- """Remove monkey-patch done in setUpModule"""
- SQLGenerator._solutions_sql = orig_solutions_sql
-
+_orig_select_principal = rqlannotation._select_principal
+_orig_check_permissions = ExecutionPlan._check_permissions
# add a dumb registered procedure
class stockproc(FunctionDescr):
@@ -1226,8 +1216,67 @@
'''SELECT rel_is0.eid_from
FROM is_relation AS rel_is0
WHERE rel_is0.eid_to=2'''),
+]
- ]
+
+class RQLGeneratorTC(BaseTestCase):
+ schema = backend = None # set this in concrete class
+
+ @classmethod
+ def setUpClass(cls):
+ if cls.backend is not None:
+ try:
+ cls.dbhelper = db.get_db_helper(cls.backend)
+ except ImportError as ex:
+ self.skipTest(str(ex))
+
+ def setUp(self):
+ self.repo = FakeRepo(self.schema, config=FakeConfig(apphome=self.datadir))
+ self.repo.system_source = mock_object(dbdriver=self.backend)
+ self.rqlhelper = RQLHelper(self.schema,
+ special_relations={'eid': 'uid',
+ 'has_text': 'fti'},
+ backend=self.backend)
+ self.qhelper = QuerierHelper(self.repo, self.schema)
+
+ def _dummy_check_permissions(self, rqlst):
+ return {(): rqlst.solutions}, set()
+
+ ExecutionPlan._check_permissions = _dummy_check_permissions
+
+ def _select_principal(scope, relations):
+ def sort_key(something):
+ try:
+ return something.r_type
+ except AttributeError:
+ return (something[0].r_type, something[1])
+ return _orig_select_principal(scope, relations,
+ _sort=lambda rels: sorted(rels, key=sort_key))
+
+ rqlannotation._select_principal = _select_principal
+ if self.backend is not None:
+ self.o = rql2sql.SQLGenerator(self.schema, self.dbhelper)
+
+ def tearDown(self):
+ ExecutionPlan._check_permissions = _orig_check_permissions
+ rqlannotation._select_principal = _orig_select_principal
+
+ def _prepare(self, rql):
+ #print '******************** prepare', rql
+ union = self.rqlhelper.parse(rql)
+ #print '********* parsed', union.as_string()
+ self.rqlhelper.compute_solutions(union)
+ #print '********* solutions', solutions
+ self.rqlhelper.simplify(union)
+ #print '********* simplified', union.as_string()
+ plan = self.qhelper.plan_factory(union, {}, FakeConnection(self.repo))
+ plan.preprocess(union)
+ for select in union.children:
+ select.solutions.sort(key=lambda x: list(x.items()))
+ #print '********* ppsolutions', solutions
+ return union
+
+
class CWRQLTC(RQLGeneratorTC):
backend = 'sqlite'
@@ -1262,6 +1311,7 @@
return '\n'.join(l.strip() for l in text.strip().splitlines())
class PostgresSQLGeneratorTC(RQLGeneratorTC):
+ maxDiff = None
backend = 'postgres'
def setUp(self):
@@ -2272,21 +2322,23 @@
rqlst = mock_object(defined_vars={})
rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True)
rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False)
- self.assertEqual(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
- {'A': 'FootGroup', 'B': 'FootTeam'}], None),
- ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
- {'A': 'FootGroup', 'B': 'FootTeam'}],
- {}, set('B'))
- )
+ self.assertEqual(
+ rql2sql.remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
+ {'A': 'FootGroup', 'B': 'FootTeam'}], None),
+ ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
+ {'A': 'FootGroup', 'B': 'FootTeam'}],
+ {}, set('B'))
+ )
def test_invariant_varying(self):
rqlst = mock_object(defined_vars={})
rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True)
rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False)
- self.assertEqual(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
- {'A': 'FootGroup', 'B': 'RugbyTeam'}], None),
- ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set())
- )
+ self.assertEqual(
+ rql2sql.remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
+ {'A': 'FootGroup', 'B': 'RugbyTeam'}], None),
+ ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set())
+ )
if __name__ == '__main__':
--- a/cubicweb/server/test/unittest_rqlannotation.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/server/test/unittest_rqlannotation.py Fri Jan 12 11:02:52 2018 +0100
@@ -1,5 +1,5 @@
# -*- coding: iso-8859-1 -*-
-# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -31,14 +31,6 @@
self.__class__.repo = repo
super(SQLGenAnnotatorTC, self).setUp()
- def get_max_eid(self):
- # no need for cleanup here
- return None
-
- def cleanup(self):
- # no need for cleanup here
- pass
-
def test_0_1(self):
with self.admin_access.cnx() as cnx:
rqlst = self._prepare(cnx, 'Any SEN,RN,OEN WHERE X from_entity SE, '
--- a/cubicweb/server/test/unittest_ssplanner.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/server/test/unittest_ssplanner.py Fri Jan 12 11:02:52 2018 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -44,24 +44,26 @@
self.planner = SSPlanner(self.o.schema, self.repo.vreg.rqlhelper)
self.system = self.o._repo.system_source
- def tearDown(self):
- BasePlannerTC.tearDown(self)
-
def test_ordered_ambigous_sol(self):
- self._test('Any XN ORDERBY XN WHERE X name XN, X is IN (Basket, State, Folder)',
- [('OneFetchStep', [('Any XN ORDERBY XN WHERE X name XN, X is IN(Basket, State, Folder)',
- [{'X': 'Basket', 'XN': 'String'},
- {'X': 'State', 'XN': 'String'},
- {'X': 'Folder', 'XN': 'String'}])],
- [])])
+ self._test(
+ 'Any XN ORDERBY XN WHERE X name XN, X is IN (Basket, State, Folder)',
+ [('OneFetchStep', [('Any XN ORDERBY XN WHERE X name XN, '
+ 'X is IN(Basket, State, Folder)',
+ [{'X': 'Basket', 'XN': 'String'},
+ {'X': 'State', 'XN': 'String'},
+ {'X': 'Folder', 'XN': 'String'}])],
+ [])])
def test_groupeded_ambigous_sol(self):
- self._test('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN (Basket, State, Folder)',
- [('OneFetchStep', [('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN(Basket, State, Folder)',
- [{'X': 'Basket', 'XN': 'String'},
- {'X': 'State', 'XN': 'String'},
- {'X': 'Folder', 'XN': 'String'}])],
- [])])
+ self._test(
+ 'Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN (Basket, State, Folder)',
+ [('OneFetchStep', [('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, '
+ 'X is IN(Basket, State, Folder)',
+ [{'X': 'Basket', 'XN': 'String'},
+ {'X': 'State', 'XN': 'String'},
+ {'X': 'Folder', 'XN': 'String'}])],
+ [])])
+
if __name__ == '__main__':
import unittest
--- a/cubicweb/test/data/libpython/cubicweb_mycube/__pkginfo__.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/test/data/libpython/cubicweb_mycube/__pkginfo__.py Fri Jan 12 11:02:52 2018 +0100
@@ -19,3 +19,4 @@
"""
distname = 'cubicweb-mycube'
+version = '1.0.0'
--- a/cubicweb/test/unittest_req.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/test/unittest_req.py Fri Jan 12 11:02:52 2018 +0100
@@ -110,15 +110,15 @@
firstname=u'adrien',
in_group=req.find('CWGroup', name=u'users').one())
- l = list(req.find_entities('CWUser', login=u'cdevienne'))
- self.assertEqual(1, len(l))
- self.assertEqual(l[0].firstname, u"Christophe")
+ users = list(req.find_entities('CWUser', login=u'cdevienne'))
+ self.assertEqual(1, len(users))
+ self.assertEqual(users[0].firstname, u"Christophe")
- l = list(req.find_entities('CWUser', login=u'patanok'))
- self.assertEqual(0, len(l))
+ users = list(req.find_entities('CWUser', login=u'patanok'))
+ self.assertEqual(0, len(users))
- l = list(req.find_entities('CWUser'))
- self.assertEqual(4, len(l))
+ users = list(req.find_entities('CWUser'))
+ self.assertEqual(4, len(users))
def test_find(self):
with self.admin_access.web_request() as req:
@@ -132,7 +132,10 @@
firstname=u'adrien',
in_group=req.find('CWGroup', name=u'users').one())
- u = req.find('CWUser', login=u'cdevienne').one()
+ rset = req.find('CWUser', login=u'cdevienne')
+ self.assertEqual(rset.printable_rql(),
+ 'Any X WHERE X is CWUser, X login "cdevienne"')
+ u = rset.one()
self.assertEqual(u.firstname, u"Christophe")
users = list(req.find('CWUser').entities())
@@ -143,17 +146,26 @@
self.assertEqual(len(groups), 1)
self.assertEqual(groups[0].name, u'users')
- users = req.find('CWUser', in_group=groups[0]).entities()
- users = list(users)
+ rset = req.find('CWUser', in_group=groups[0])
+ self.assertEqual(rset.printable_rql(),
+ 'Any X WHERE X is CWUser, X in_group A, '
+ 'A eid {0}'.format(groups[0].eid))
+ users = list(rset.entities())
self.assertEqual(len(users), 2)
- with self.assertRaises(AssertionError):
+ with self.assertRaisesRegexp(
+ KeyError, "^'chapeau not in CWUser subject relations'$"
+ ):
req.find('CWUser', chapeau=u"melon")
- with self.assertRaises(AssertionError):
+ with self.assertRaisesRegexp(
+ KeyError, "^'buddy not in CWUser object relations'$"
+ ):
req.find('CWUser', reverse_buddy=users[0])
- with self.assertRaises(NotImplementedError):
+ with self.assertRaisesRegexp(
+ NotImplementedError, '^in_group: list of values are not supported$'
+ ):
req.find('CWUser', in_group=[1, 2])
--- a/cubicweb/test/unittest_rqlrewrite.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/test/unittest_rqlrewrite.py Fri Jan 12 11:02:52 2018 +0100
@@ -18,15 +18,17 @@
from six import string_types
-from logilab.common.testlib import unittest_main, TestCase
from logilab.common.testlib import mock_object
+from logilab.common.decorators import monkeypatch
from yams import BadSchemaDefinition
from yams.buildobjs import RelationDefinition
from rql import parse, nodes, RQLHelper
from cubicweb import Unauthorized, rqlrewrite, devtools
+from cubicweb.rqlrewrite import RQLRewriter
from cubicweb.schema import RRQLExpression, ERQLExpression
from cubicweb.devtools import repotest
+from cubicweb.devtools.testlib import CubicWebTC, TestCase
def setUpModule(*args):
@@ -40,46 +42,65 @@
'has_text': 'fti'})
repotest.do_monkey_patch()
+
def tearDownModule(*args):
repotest.undo_monkey_patch()
global rqlhelper, schema
del rqlhelper, schema
+
def eid_func_map(eid):
return {1: 'CWUser',
2: 'Card',
3: 'Affaire'}[eid]
+
def _prepare_rewriter(rewriter_cls, kwargs):
class FakeVReg:
schema = schema
+
@staticmethod
def solutions(sqlcursor, rqlst, kwargs):
rqlhelper.compute_solutions(rqlst, {'eid': eid_func_map}, kwargs=kwargs)
+
class rqlhelper:
@staticmethod
def annotate(rqlst):
rqlhelper.annotate(rqlst)
- @staticmethod
- def simplify(mainrqlst, needcopy=False):
- rqlhelper.simplify(rqlst, needcopy)
+
return rewriter_cls(mock_object(vreg=FakeVReg, user=(mock_object(eid=1))))
+
def rewrite(rqlst, snippets_map, kwargs, existingvars=None):
rewriter = _prepare_rewriter(rqlrewrite.RQLRewriter, kwargs)
+ # turn {(V1, V2): constraints} into [(varmap, constraints)]
snippets = []
+ snippet_varmap = {}
for v, exprs in sorted(snippets_map.items()):
- rqlexprs = [isinstance(snippet, string_types)
- and mock_object(snippet_rqlst=parse(u'Any X WHERE '+snippet).children[0],
- expression=u'Any X WHERE '+snippet)
- or snippet
- for snippet in exprs]
- snippets.append((dict([v]), rqlexprs))
+ rqlexprs = []
+ varmap = dict([v])
+ for snippet in exprs:
+ # when the same snippet is impacting several variables, group them
+ # unless there is some conflicts on the snippet's variable name (we
+ # only want that for constraint on relations using both S and O)
+ if snippet in snippet_varmap and not (
+ set(varmap.values()) & set(snippet_varmap[snippet].values())):
+ snippet_varmap[snippet].update(varmap)
+ continue
+ snippet_varmap[snippet] = varmap
+ if isinstance(snippet, string_types):
+ snippet = mock_object(snippet_rqlst=parse(u'Any X WHERE ' + snippet).children[0],
+ expression=u'Any X WHERE ' + snippet)
+ rqlexprs.append(snippet)
+ if rqlexprs:
+ snippets.append((varmap, rqlexprs))
+
rqlhelper.compute_solutions(rqlst.children[0], {'eid': eid_func_map}, kwargs=kwargs)
rewriter.rewrite(rqlst.children[0], snippets, kwargs, existingvars)
check_vrefs(rqlst.children[0])
return rewriter.rewritten
+
def check_vrefs(node):
vrefmaps = {}
selects = []
@@ -88,14 +109,15 @@
try:
vrefmaps[stmt].setdefault(vref.name, set()).add(vref)
except KeyError:
- vrefmaps[stmt] = {vref.name: set( (vref,) )}
+ vrefmaps[stmt] = {vref.name: set((vref,))}
selects.append(stmt)
assert node in selects, (node, selects)
for stmt in selects:
for var in stmt.defined_vars.values():
assert var.stinfo['references']
vrefmap = vrefmaps[stmt]
- assert not (var.stinfo['references'] ^ vrefmap[var.name]), (node.as_string(), var, var.stinfo['references'], vrefmap[var.name])
+ assert not (var.stinfo['references'] ^ vrefmap[var.name]), (
+ node.as_string(), var, var.stinfo['references'], vrefmap[var.name])
class RQLRewriteTC(TestCase):
@@ -109,72 +131,81 @@
def test_base_var(self):
constraint = ('X in_state S, U in_group G, P require_state S,'
- 'P name "read", P require_group G')
+ 'P name "read", P require_group G')
rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'X'): (constraint,)}, {})
- self.assertEqual(rqlst.as_string(),
- u'Any C WHERE C is Card, B eid %(D)s, '
- 'EXISTS(C in_state A, B in_group E, F require_state A, '
- 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission)')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any C WHERE C is Card, B eid %(D)s, '
+ 'EXISTS(C in_state A, B in_group E, F require_state A, '
+ 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission)')
def test_multiple_var(self):
card_constraint = ('X in_state S, U in_group G, P require_state S,'
'P name "read", P require_group G')
affaire_constraints = ('X ref LIKE "PUBLIC%"', 'U in_group G, G name "public"')
- kwargs = {'u':2}
+ kwargs = {'u': 2}
rqlst = parse(u'Any S WHERE S documented_by C, C eid %(u)s')
rewrite(rqlst, {('C', 'X'): (card_constraint,), ('S', 'X'): affaire_constraints},
kwargs)
self.assertMultiLineEqual(
rqlst.as_string(),
u'Any S WHERE S documented_by C, C eid %(u)s, B eid %(D)s, '
- 'EXISTS(C in_state A, B in_group E, F require_state A, '
- 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission), '
- '(EXISTS(S ref LIKE "PUBLIC%")) OR (EXISTS(B in_group G, G name "public", G is CWGroup)), '
- 'S is Affaire')
+ 'EXISTS(C in_state A, B in_group E, F require_state A, '
+ 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission), '
+ '(EXISTS(S ref LIKE "PUBLIC%")) '
+ 'OR (EXISTS(B in_group G, G name "public", G is CWGroup)), '
+ 'S is Affaire')
self.assertIn('D', kwargs)
def test_or(self):
- constraint = '(X identity U) OR (X in_state ST, CL identity U, CL in_state ST, ST name "subscribed")'
+ constraint = (
+ '(X identity U) OR '
+ '(X in_state ST, CL identity U, CL in_state ST, ST name "subscribed")'
+ )
rqlst = parse(u'Any S WHERE S owned_by C, C eid %(u)s, S is in (CWUser, CWGroup)')
- rewrite(rqlst, {('C', 'X'): (constraint,)}, {'u':1})
- self.assertEqual(rqlst.as_string(),
- 'Any S WHERE S owned_by C, C eid %(u)s, S is IN(CWUser, CWGroup), A eid %(B)s, '
- 'EXISTS((C identity A) OR (C in_state D, E identity A, '
- 'E in_state D, D name "subscribed"), D is State, E is CWUser)')
+ rewrite(rqlst, {('C', 'X'): (constraint,)}, {'u': 1})
+ self.assertEqual(
+ rqlst.as_string(),
+ 'Any S WHERE S owned_by C, C eid %(u)s, S is IN(CWUser, CWGroup), A eid %(B)s, '
+ 'EXISTS((C identity A) OR (C in_state D, E identity A, '
+ 'E in_state D, D name "subscribed"), D is State, E is CWUser)')
def test_simplified_rqlst(self):
constraint = ('X in_state S, U in_group G, P require_state S,'
- 'P name "read", P require_group G')
- rqlst = parse(u'Any 2') # this is the simplified rql st for Any X WHERE X eid 12
+ 'P name "read", P require_group G')
+ rqlst = parse(u'Any 2') # this is the simplified rql st for Any X WHERE X eid 12
rewrite(rqlst, {('2', 'X'): (constraint,)}, {})
- self.assertEqual(rqlst.as_string(),
- u'Any 2 WHERE B eid %(C)s, '
- 'EXISTS(2 in_state A, B in_group D, E require_state A, '
- 'E name "read", E require_group D, A is State, D is CWGroup, E is CWPermission)')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any 2 WHERE B eid %(C)s, '
+ 'EXISTS(2 in_state A, B in_group D, E require_state A, '
+ 'E name "read", E require_group D, A is State, D is CWGroup, E is CWPermission)')
def test_optional_var_1(self):
constraint = ('X in_state S, U in_group G, P require_state S,'
- 'P name "read", P require_group G')
+ 'P name "read", P require_group G')
rqlst = parse(u'Any A,C WHERE A documented_by C?')
rewrite(rqlst, {('C', 'X'): (constraint,)}, {})
- self.assertEqual(rqlst.as_string(),
- u'Any A,C WHERE A documented_by C?, A is Affaire '
- 'WITH C BEING '
- '(Any C WHERE EXISTS(C in_state B, D in_group F, G require_state B, G name "read", '
- 'G require_group F), D eid %(A)s, C is Card)')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any A,C WHERE A documented_by C?, A is Affaire '
+ 'WITH C BEING '
+ '(Any C WHERE EXISTS(C in_state B, D in_group F, G require_state B, G name "read", '
+ 'G require_group F), D eid %(A)s, C is Card)')
def test_optional_var_2(self):
constraint = ('X in_state S, U in_group G, P require_state S,'
- 'P name "read", P require_group G')
+ 'P name "read", P require_group G')
rqlst = parse(u'Any A,C,T WHERE A documented_by C?, C title T')
rewrite(rqlst, {('C', 'X'): (constraint,)}, {})
- self.assertEqual(rqlst.as_string(),
- u'Any A,C,T WHERE A documented_by C?, A is Affaire '
- 'WITH C,T BEING '
- '(Any C,T WHERE C title T, EXISTS(C in_state B, D in_group F, '
- 'G require_state B, G name "read", G require_group F), '
- 'D eid %(A)s, C is Card)')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any A,C,T WHERE A documented_by C?, A is Affaire '
+ 'WITH C,T BEING '
+ '(Any C,T WHERE C title T, EXISTS(C in_state B, D in_group F, '
+ 'G require_state B, G name "read", G require_group F), '
+ 'D eid %(A)s, C is Card)')
def test_optional_var_3(self):
constraint1 = ('X in_state S, U in_group G, P require_state S,'
@@ -182,12 +213,14 @@
constraint2 = 'X in_state S, S name "public"'
rqlst = parse(u'Any A,C,T WHERE A documented_by C?, C title T')
rewrite(rqlst, {('C', 'X'): (constraint1, constraint2)}, {})
- self.assertEqual(rqlst.as_string(),
- u'Any A,C,T WHERE A documented_by C?, A is Affaire '
- 'WITH C,T BEING (Any C,T WHERE C title T, '
- '(EXISTS(C in_state B, D in_group F, G require_state B, G name "read", G require_group F)) '
- 'OR (EXISTS(C in_state E, E name "public")), '
- 'D eid %(A)s, C is Card)')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any A,C,T WHERE A documented_by C?, A is Affaire '
+ 'WITH C,T BEING (Any C,T WHERE C title T, '
+ '(EXISTS(C in_state B, D in_group F, G require_state B, '
+ 'G name "read", G require_group F)) '
+ 'OR (EXISTS(C in_state E, E name "public")), '
+ 'D eid %(A)s, C is Card)')
def test_optional_var_4(self):
constraint1 = 'A created_by U, X documented_by A'
@@ -197,29 +230,38 @@
rewrite(rqlst, {('LA', 'X'): (constraint1, constraint2),
('X', 'X'): (constraint3,),
('Y', 'X'): (constraint3,)}, {})
- self.assertEqual(rqlst.as_string(),
- u'Any X,LA,Y WHERE LA? documented_by X, LA concerne Y, B eid %(C)s, '
- 'EXISTS(X created_by B), EXISTS(Y created_by B), '
- 'X is Card, Y is IN(Division, Note, Societe) '
- 'WITH LA BEING (Any LA WHERE (EXISTS(A created_by B, LA documented_by A)) OR (EXISTS(E created_by B, LA concerne E)), '
- 'B eid %(D)s, LA is Affaire)')
-
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any X,LA,Y WHERE LA? documented_by X, LA concerne Y, B eid %(C)s, '
+ 'EXISTS(X created_by B), EXISTS(Y created_by B), '
+ 'X is Card, Y is IN(Division, Note, Societe) '
+ 'WITH LA BEING (Any LA WHERE (EXISTS(A created_by B, LA documented_by A)) '
+ 'OR (EXISTS(E created_by B, LA concerne E)), '
+ 'B eid %(D)s, LA is Affaire)')
def test_ambiguous_optional_same_exprs(self):
"""See #3013535"""
# see test of the same name in RewriteFullTC: original problem is
# unreproducible here because it actually lies in
# RQLRewriter.insert_local_checks
- rqlst = parse(u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s, X creation_date CD')
- rewrite(rqlst, {('X', 'X'): ('X created_by U',),}, {'a': 3})
- self.assertEqual(rqlst.as_string(),
- u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s WITH X,CD BEING (Any X,CD WHERE X creation_date CD, EXISTS(X created_by B), B eid %(A)s, X is IN(Division, Note, Societe))')
+ rqlst = parse(u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, '
+ 'A eid %(a)s, X creation_date CD')
+ rewrite(rqlst, {('X', 'X'): ('X created_by U',)}, {'a': 3})
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s '
+ 'WITH X,CD BEING (Any X,CD WHERE X creation_date CD, '
+ 'EXISTS(X created_by B), B eid %(A)s, X is IN(Division, Note, Societe))')
def test_ambiguous_optional_same_exprs_constant(self):
- rqlst = parse(u'Any A,AR,X WHERE A concerne X?, A ref AR, A eid %(a)s, X creation_date TODAY')
- rewrite(rqlst, {('X', 'X'): ('X created_by U',),}, {'a': 3})
- self.assertEqual(rqlst.as_string(),
- u'Any A,AR,X WHERE A concerne X?, A ref AR, A eid %(a)s WITH X BEING (Any X WHERE X creation_date TODAY, EXISTS(X created_by B), B eid %(A)s, X is IN(Division, Note, Societe))')
+ rqlst = parse(u'Any A,AR,X WHERE A concerne X?, A ref AR, '
+ 'A eid %(a)s, X creation_date TODAY')
+ rewrite(rqlst, {('X', 'X'): ('X created_by U',)}, {'a': 3})
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any A,AR,X WHERE A concerne X?, A ref AR, A eid %(a)s '
+ 'WITH X BEING (Any X WHERE X creation_date TODAY, '
+ 'EXISTS(X created_by B), B eid %(A)s, X is IN(Division, Note, Societe))')
def test_optional_var_inlined(self):
c1 = ('X require_permission P')
@@ -229,12 +271,13 @@
('A', 'X'): (c2,),
}, {})
# XXX suboptimal
- self.assertEqual(rqlst.as_string(),
- "Any C,A,R WITH A,C,R BEING "
- "(Any A,C,R WHERE A? inlined_card C, A ref R, "
- "(A is NULL) OR (EXISTS(A inlined_card B, B require_permission D, "
- "B is Card, D is CWPermission)), "
- "A is Affaire, C is Card, EXISTS(C require_permission E, E is CWPermission))")
+ self.assertEqual(
+ rqlst.as_string(),
+ "Any C,A,R WITH A,C,R BEING "
+ "(Any A,C,R WHERE A? inlined_card C, A ref R, "
+ "(A is NULL) OR (EXISTS(A inlined_card B, B require_permission D, "
+ "B is Card, D is CWPermission)), "
+ "A is Affaire, C is Card, EXISTS(C require_permission E, E is CWPermission))")
# def test_optional_var_inlined_has_perm(self):
# c1 = ('X require_permission P')
@@ -249,7 +292,8 @@
def test_optional_var_inlined_imbricated_error(self):
c1 = ('X require_permission P')
c2 = ('X inlined_card O, O require_permission P')
- rqlst = parse(u'Any C,A,R,A2,R2 WHERE A? inlined_card C, A ref R,A2? inlined_card C, A2 ref R2')
+ rqlst = parse(u'Any C,A,R,A2,R2 WHERE A? inlined_card C, A ref R,'
+ 'A2? inlined_card C, A2 ref R2')
self.assertRaises(BadSchemaDefinition,
rewrite, rqlst, {('C', 'X'): (c1,),
('A', 'X'): (c2,),
@@ -258,7 +302,6 @@
def test_optional_var_inlined_linked(self):
c1 = ('X require_permission P')
- c2 = ('X inlined_card O, O require_permission P')
rqlst = parse(u'Any A,W WHERE A inlined_card C?, C inlined_note N, '
'N inlined_affaire W')
rewrite(rqlst, {('C', 'X'): (c1,)}, {})
@@ -295,6 +338,7 @@
self.assertEqual(rqlst.as_string(),
'Any C WHERE C in_state STATE?, C is Card, '
'EXISTS(STATE name "hop"), STATE is State')
+
def test_relation_optimization_2_rhs(self):
snippet = ('TW? subworkflow_exit X, TW name "hop"')
rqlst = parse(u'SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT')
@@ -362,10 +406,11 @@
def test_unsupported_constraint_2(self):
trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"')
rqlst = parse(u'Any U,T WHERE U is CWUser, T wf_info_for U')
- rewrite(rqlst, {('T', 'X'): (trinfo_constraint, 'X wf_info_for Y, Y in_group G, G name "managers"')}, {})
+ rewrite(rqlst, {('T', 'X'): (trinfo_constraint,
+ 'X wf_info_for Y, Y in_group G, G name "managers"')}, {})
self.assertEqual(rqlst.as_string(),
u'Any U,T WHERE U is CWUser, T wf_info_for U, '
- 'EXISTS(U in_group B, B name "managers", B is CWGroup), T is TrInfo')
+ u'EXISTS(U in_group B, B name "managers", B is CWGroup), T is TrInfo')
def test_unsupported_constraint_3(self):
self.skipTest('raise unauthorized for now')
@@ -379,17 +424,20 @@
constraint = ('X concerne Y')
rqlst = parse(u'Affaire X')
rewrite(rqlst, {('X', 'X'): (constraint,)}, {})
- self.assertEqual(rqlst.as_string(),
- u"Any X WHERE X is Affaire, ((EXISTS(X concerne A, A is Division)) OR (EXISTS(X concerne C, C is Societe))) OR (EXISTS(X concerne B, B is Note))")
+ self.assertEqual(
+ rqlst.as_string(),
+ u"Any X WHERE X is Affaire, ((EXISTS(X concerne A, A is Division)) "
+ "OR (EXISTS(X concerne C, C is Societe))) OR (EXISTS(X concerne B, B is Note))")
def test_add_ambiguity_outerjoin(self):
constraint = ('X concerne Y')
rqlst = parse(u'Any X,C WHERE X? documented_by C')
rewrite(rqlst, {('X', 'X'): (constraint,)}, {})
# ambiguity are kept in the sub-query, no need to be resolved using OR
- self.assertEqual(rqlst.as_string(),
- u"Any X,C WHERE X? documented_by C, C is Card WITH X BEING (Any X WHERE EXISTS(X concerne A), X is Affaire)")
-
+ self.assertEqual(
+ rqlst.as_string(),
+ u"Any X,C WHERE X? documented_by C, C is Card "
+ "WITH X BEING (Any X WHERE EXISTS(X concerne A), X is Affaire)")
def test_rrqlexpr_nonexistant_subject_1(self):
constraint = RRQLExpression('S owned_by U')
@@ -418,26 +466,33 @@
'Any C WHERE C is Card, B eid %(D)s, EXISTS(A owned_by B, A is Card)')
rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SOU')
- self.assertEqual(rqlst.as_string(),
- 'Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A, D owned_by A, D is Card)')
+ self.assertEqual(
+ rqlst.as_string(),
+ 'Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A, D owned_by A, D is Card)')
def test_rrqlexpr_nonexistant_subject_3(self):
constraint = RRQLExpression('U in_group G, G name "users"')
rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU')
- self.assertEqual(rqlst.as_string(),
- u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", D is CWGroup)')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any C WHERE C is Card, A eid %(B)s, '
+ 'EXISTS(A in_group D, D name "users", D is CWGroup)')
def test_rrqlexpr_nonexistant_subject_4(self):
constraint = RRQLExpression('U in_group G, G name "users", S owned_by U')
rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU')
- self.assertEqual(rqlst.as_string(),
- u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", C owned_by A, D is CWGroup)')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any C WHERE C is Card, A eid %(B)s, '
+ 'EXISTS(A in_group D, D name "users", C owned_by A, D is CWGroup)')
rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU')
- self.assertEqual(rqlst.as_string(),
- u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", D is CWGroup)')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any C WHERE C is Card, A eid %(B)s, '
+ 'EXISTS(A in_group D, D name "users", D is CWGroup)')
def test_rrqlexpr_nonexistant_subject_5(self):
constraint = RRQLExpression('S owned_by Z, O owned_by Z, O is Card')
@@ -450,22 +505,28 @@
constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X')
rqlst = parse(u'Affaire A WHERE NOT EXISTS(A documented_by C)')
rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X')
- self.assertEqual(rqlst.as_string(),
- u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any A WHERE NOT EXISTS(A documented_by C, '
+ 'EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire')
def test_rqlexpr_not_relation_1_2(self):
constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X')
rqlst = parse(u'Affaire A WHERE NOT EXISTS(A documented_by C)')
rewrite(rqlst, {('A', 'X'): (constraint,)}, {}, 'X')
- self.assertEqual(rqlst.as_string(),
- u'Any A WHERE NOT EXISTS(A documented_by C, C is Card), A is Affaire, EXISTS(A owned_by B, B login "hop", B is CWUser)')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any A WHERE NOT EXISTS(A documented_by C, C is Card), A is Affaire, '
+ 'EXISTS(A owned_by B, B login "hop", B is CWUser)')
def test_rqlexpr_not_relation_2(self):
constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X')
rqlst = rqlhelper.parse(u'Affaire A WHERE NOT A documented_by C', annotate=False)
rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X')
- self.assertEqual(rqlst.as_string(),
- u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any A WHERE NOT EXISTS(A documented_by C, '
+ 'EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire')
def test_rqlexpr_multiexpr_outerjoin(self):
c1 = ERQLExpression('X owned_by Z, Z login "hop"', 'X')
@@ -473,11 +534,12 @@
c3 = ERQLExpression('X owned_by Z, Z login "momo"', 'X')
rqlst = rqlhelper.parse(u'Any A WHERE A documented_by C?', annotate=False)
rewrite(rqlst, {('C', 'X'): (c1, c2, c3)}, {}, 'X')
- self.assertEqual(rqlst.as_string(),
- u'Any A WHERE A documented_by C?, A is Affaire '
- 'WITH C BEING (Any C WHERE ((EXISTS(C owned_by B, B login "hop")) '
- 'OR (EXISTS(C owned_by D, D login "momo"))) '
- 'OR (EXISTS(C owned_by A, A login "hip")), C is Card)')
+ self.assertEqual(
+ rqlst.as_string(),
+ u'Any A WHERE A documented_by C?, A is Affaire '
+ 'WITH C BEING (Any C WHERE ((EXISTS(C owned_by B, B login "hop")) '
+ 'OR (EXISTS(C owned_by D, D login "momo"))) '
+ 'OR (EXISTS(C owned_by A, A login "hip")), C is Card)')
def test_multiple_erql_one_bad(self):
#: reproduce bug #2236985
@@ -503,7 +565,36 @@
'Any O WHERE S use_email O, S is CWUser, O is EmailAddress, '
'EXISTS(NOT S in_group A, A name "guests", A is CWGroup)')
-from cubicweb.devtools.testlib import CubicWebTC
+ def test_ambiguous_constraint_not_exists(self):
+ state_constraint = (
+ 'NOT EXISTS(A require_permission S) '
+ 'OR EXISTS(B require_permission S, B is Card, O name "state1")'
+ 'OR EXISTS(C require_permission S, C is Note, O name "state2")'
+ )
+ rqlst = parse(u'Any P WHERE NOT P require_state S')
+ rewrite(rqlst, {('P', 'S'): (state_constraint,), ('S', 'O'): (state_constraint,)}, {})
+ self.assertMultiLineEqual(
+ rqlst.as_string(),
+ u'Any P WHERE NOT P require_state S, '
+ '((NOT EXISTS(A require_permission P, A is IN(Card, Note)))'
+ ' OR (EXISTS(B require_permission P, B is Card, S name "state1")))'
+ ' OR (EXISTS(C require_permission P, C is Note, S name "state2")), '
+ 'P is CWPermission, S is State')
+
+ def test_ambiguous_using_is_in_function(self):
+ state_constraint = (
+ 'NOT EXISTS(A require_permission S) '
+ 'OR EXISTS(B require_permission S, B is IN (Card, Note), O name "state1")'
+ )
+ rqlst = parse(u'Any P WHERE NOT P require_state S')
+ rewrite(rqlst, {('P', 'S'): (state_constraint,), ('S', 'O'): (state_constraint,)}, {})
+ self.assertMultiLineEqual(
+ rqlst.as_string(),
+ u'Any P WHERE NOT P require_state S, '
+ '(NOT EXISTS(A require_permission P, A is IN(Card, Note))) '
+ 'OR (EXISTS(B require_permission P, B is IN(Card, Note), S name "state1")), '
+ 'P is CWPermission, S is State')
+
class RewriteFullTC(CubicWebTC):
appid = 'data-rewrite'
@@ -512,7 +603,7 @@
if args is None:
args = {}
querier = self.repo.querier
- union = parse(rql) # self.vreg.parse(rql, annotate=True)
+ union = parse(rql) # self.vreg.parse(rql, annotate=True)
with self.admin_access.repo_cnx() as cnx:
self.vreg.solutions(cnx, union, args)
querier._annotate(union)
@@ -546,7 +637,6 @@
union = self.process('Any A,AR,X,CD WHERE A concerne X?, A ref AR, X creation_date CD')
self.assertEqual(union.as_string(), 'not generated today')
-
def test_xxxx(self):
edef1 = self.schema['Societe']
edef2 = self.schema['Division']
@@ -564,12 +654,11 @@
def test_question_mark_attribute_snippet(self):
# see #3661918
- from cubicweb.rqlrewrite import RQLRewriter
- from logilab.common.decorators import monkeypatch
repotest.undo_monkey_patch()
orig_insert_snippets = RQLRewriter.insert_snippets
# patch insert_snippets and not rewrite, insert_snippets is already
# monkey patches (see above setupModule/repotest)
+
@monkeypatch(RQLRewriter)
def insert_snippets(self, snippets, varexistsmap=None):
# crash occurs if snippets are processed in a specific order, force
@@ -632,21 +721,21 @@
'C role D, D name "illustrator")',
rqlst.as_string())
-
def test_rewrite2(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
+ 'C manifestation O, C role R, R name "illustrator"'}
rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B, C require_permission R, S'
'require_state O')
rule_rewrite(rqlst, rules)
- self.assertEqual('Any A,B WHERE C require_permission R, S require_state O, '
- 'D is Contribution, D contributor A, D manifestation B, D role E, '
- 'E name "illustrator"',
- rqlst.as_string())
+ self.assertEqual(
+ 'Any A,B WHERE C require_permission R, S require_state O, '
+ 'D is Contribution, D contributor A, D manifestation B, D role E, '
+ 'E name "illustrator"',
+ rqlst.as_string())
def test_rewrite3(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
+ 'C manifestation O, C role R, R name "illustrator"'}
rqlst = rqlhelper.parse(u'Any A,B WHERE E require_permission T, A illustrator_of B')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE E require_permission T, '
@@ -656,7 +745,7 @@
def test_rewrite4(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
+ 'C manifestation O, C role R, R name "illustrator"'}
rqlst = rqlhelper.parse(u'Any A,B WHERE C require_permission R, A illustrator_of B')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE C require_permission R, '
@@ -666,7 +755,7 @@
def test_rewrite5(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
+ 'C manifestation O, C role R, R name "illustrator"'}
rqlst = rqlhelper.parse(u'Any A,B WHERE C require_permission R, A illustrator_of B, '
'S require_state O')
rule_rewrite(rqlst, rules)
@@ -678,7 +767,7 @@
# Tests for the with clause
def test_rewrite_with(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
+ 'C manifestation O, C role R, R name "illustrator"'}
rqlst = rqlhelper.parse(u'Any A,B WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WITH A,B BEING '
@@ -688,8 +777,9 @@
def test_rewrite_with2(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse(u'Any A,B WHERE T require_permission C WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse(u'Any A,B WHERE T require_permission C '
+ 'WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE T require_permission C '
'WITH A,B BEING (Any X,Y WHERE A is Contribution, '
@@ -707,32 +797,34 @@
def test_rewrite_with4(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
+ 'C manifestation O, C role R, R name "illustrator"'}
rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B '
- 'WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
+ 'WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
rule_rewrite(rqlst, rules)
- self.assertEqual('Any A,B WHERE C is Contribution, '
- 'C contributor A, C manifestation B, C role D, '
- 'D name "illustrator" WITH A,B BEING '
- '(Any X,Y WHERE A is Contribution, A contributor X, '
- 'A manifestation Y, A role B, B name "illustrator")',
- rqlst.as_string())
+ self.assertEqual(
+ 'Any A,B WHERE C is Contribution, '
+ 'C contributor A, C manifestation B, C role D, '
+ 'D name "illustrator" WITH A,B BEING '
+ '(Any X,Y WHERE A is Contribution, A contributor X, '
+ 'A manifestation Y, A role B, B name "illustrator")',
+ rqlst.as_string())
# Tests for the union
def test_rewrite_union(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
+ 'C manifestation O, C role R, R name "illustrator"'}
rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B) UNION'
'(Any X,Y WHERE X is CWUser, Z manifestation Y)')
rule_rewrite(rqlst, rules)
- self.assertEqual('(Any A,B WHERE C is Contribution, '
- 'C contributor A, C manifestation B, C role D, '
- 'D name "illustrator") UNION (Any X,Y WHERE X is CWUser, Z manifestation Y)',
- rqlst.as_string())
+ self.assertEqual(
+ '(Any A,B WHERE C is Contribution, '
+ 'C contributor A, C manifestation B, C role D, '
+ 'D name "illustrator") UNION (Any X,Y WHERE X is CWUser, Z manifestation Y)',
+ rqlst.as_string())
def test_rewrite_union2(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
+ 'C manifestation O, C role R, R name "illustrator"'}
rqlst = rqlhelper.parse(u'(Any Y WHERE Y match W) UNION '
'(Any A WHERE A illustrator_of B) UNION '
'(Any Y WHERE Y is ArtWork)')
@@ -746,9 +838,9 @@
# Tests for the exists clause
def test_rewrite_exists(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
+ 'C manifestation O, C role R, R name "illustrator"'}
rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B, '
- 'EXISTS(B is ArtWork))')
+ 'EXISTS(B is ArtWork))')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE EXISTS(B is ArtWork), '
'C is Contribution, C contributor A, C manifestation B, C role D, '
@@ -757,7 +849,7 @@
def test_rewrite_exists2(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
+ 'C manifestation O, C role R, R name "illustrator"'}
rqlst = rqlhelper.parse(u'(Any A,B WHERE B contributor A, EXISTS(A illustrator_of W))')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE B contributor A, '
@@ -767,7 +859,7 @@
def test_rewrite_exists3(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
- 'C manifestation O, C role R, R name "illustrator"'}
+ 'C manifestation O, C role R, R name "illustrator"'}
rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B, EXISTS(A illustrator_of W))')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE EXISTS(C is Contribution, C contributor A, '
@@ -779,13 +871,14 @@
# Test for GROUPBY
def test_rewrite_groupby(self):
rules = {'participated_in': 'S contributor O'}
- rqlst = rqlhelper.parse(u'Any SUM(SA) GROUPBY S WHERE P participated_in S, P manifestation SA')
+ rqlst = rqlhelper.parse(u'Any SUM(SA) GROUPBY S '
+ 'WHERE P participated_in S, P manifestation SA')
rule_rewrite(rqlst, rules)
self.assertEqual('Any SUM(SA) GROUPBY S WHERE P manifestation SA, P contributor S',
rqlst.as_string())
-class RQLRelationRewriterTC(CubicWebTC):
+class RQLRelationRewriterCWTC(CubicWebTC):
appid = 'data-rewrite'
@@ -794,8 +887,8 @@
art = cnx.create_entity('ArtWork', name=u'Les travailleurs de la Mer')
role = cnx.create_entity('Role', name=u'illustrator')
vic = cnx.create_entity('Person', name=u'Victor Hugo')
- contrib = cnx.create_entity('Contribution', code=96, contributor=vic,
- manifestation=art, role=role)
+ cnx.create_entity('Contribution', code=96, contributor=vic,
+ manifestation=art, role=role)
rset = cnx.execute('Any X WHERE X illustrator_of S')
self.assertEqual([u'Victor Hugo'],
[result.name for result in rset.entities()])
@@ -816,4 +909,5 @@
if __name__ == '__main__':
- unittest_main()
+ import unittest
+ unittest.main()
--- a/cubicweb/web/component.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/web/component.py Fri Jan 12 11:02:52 2018 +0100
@@ -126,7 +126,7 @@
return url
def ajax_page_url(self, **params):
- divid = params.setdefault('divid', 'pageContent')
+ divid = params.setdefault('divid', 'contentmain')
params['rql'] = self.cw_rset.printable_rql()
return js_href("$(%s).loadxhtml(AJAX_PREFIX_URL, %s, 'get', 'swap')" % (
json_dumps('#'+divid), js.ajaxFuncArgs('view', params)))
--- a/cubicweb/web/test/unittest_views_basecontrollers.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/web/test/unittest_views_basecontrollers.py Fri Jan 12 11:02:52 2018 +0100
@@ -1012,6 +1012,13 @@
f = appobject(req)
self.assertEqual(f(12, 13), '25')
+ def test_badrequest(self):
+ with self.assertRaises(RemoteCallFailed) as cm:
+ with self.remote_calling('foo'):
+ pass
+ self.assertEqual(cm.exception.status, 400)
+ self.assertEqual(cm.exception.reason, 'no foo method')
+
class JSonControllerTC(AjaxControllerTC):
# NOTE: this class performs the same tests as AjaxController but with
--- a/cubicweb/web/views/ajaxcontroller.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/web/views/ajaxcontroller.py Fri Jan 12 11:02:52 2018 +0100
@@ -67,12 +67,13 @@
from functools import partial
from six import PY2, text_type
+from six.moves import http_client
from logilab.common.date import strptime
from logilab.common.registry import yes
from logilab.common.deprecation import deprecated
-from cubicweb import ObjectNotFound, NoSelectableObject
+from cubicweb import ObjectNotFound, NoSelectableObject, ValidationError
from cubicweb.appobject import AppObject
from cubicweb.utils import json, json_dumps, UStringIO
from cubicweb.uilib import exc_message
@@ -116,7 +117,8 @@
try:
fname = self._cw.form['fname']
except KeyError:
- raise RemoteCallFailed('no method specified')
+ raise RemoteCallFailed('no method specified',
+ status=http_client.BAD_REQUEST)
# 1/ check first for old-style (JSonController) ajax func for bw compat
try:
func = getattr(basecontrollers.JSonController, 'js_%s' % fname)
@@ -128,11 +130,13 @@
try:
func = self._cw.vreg['ajax-func'].select(fname, self._cw)
except ObjectNotFound:
- raise RemoteCallFailed('no %s method' % fname)
+ raise RemoteCallFailed('no %s method' % fname,
+ status=http_client.BAD_REQUEST)
else:
warn('[3.15] remote function %s found on JSonController, '
'use AjaxFunction / @ajaxfunc instead' % fname,
DeprecationWarning, stacklevel=2)
+ debug_mode = self._cw.vreg.config.debugmode
# no <arg> attribute means the callback takes no argument
args = self._cw.form.get('arg', ())
if not isinstance(args, (list, tuple)):
@@ -140,16 +144,23 @@
try:
args = [json.loads(arg) for arg in args]
except ValueError as exc:
- self.exception('error while decoding json arguments for '
- 'js_%s: %s (err: %s)', fname, args, exc)
- raise RemoteCallFailed(exc_message(exc, self._cw.encoding))
+ if debug_mode:
+ self.exception('error while decoding json arguments for '
+ 'js_%s: %s (err: %s)', fname, args, exc)
+ raise RemoteCallFailed(exc_message(exc, self._cw.encoding),
+ status=http_client.BAD_REQUEST)
try:
result = func(*args)
except (RemoteCallFailed, DirectResponse):
raise
+ except ValidationError as exc:
+ raise RemoteCallFailed(exc_message(exc, self._cw.encoding),
+ status=http_client.BAD_REQUEST)
except Exception as exc:
- self.exception('an exception occurred while calling js_%s(%s): %s',
- fname, args, exc)
+ if debug_mode:
+ self.exception(
+ 'an exception occurred while calling js_%s(%s): %s',
+ fname, args, exc)
raise RemoteCallFailed(exc_message(exc, self._cw.encoding))
if result is None:
return ''
@@ -219,7 +230,8 @@
try:
return self._cw.execute(rql, args)
except Exception as ex:
- self.exception("error in _exec(rql=%s): %s", rql, ex)
+ if self._cw.vreg.config.debugmode:
+ self.exception("error in _exec(rql=%s): %s", rql, ex)
return None
return None
@@ -232,29 +244,25 @@
stream = UStringIO()
kwargs['w'] = stream.write
assert not paginate
- if divid == 'pageContent':
+ if divid == 'contentmain':
# ensure divid isn't reused by the view (e.g. table view)
del self._cw.form['divid']
- # mimick main template behaviour
- stream.write(u'<div id="pageContent">')
- vtitle = self._cw.form.get('vtitle')
- if vtitle:
- stream.write(u'<h1 class="vtitle">%s</h1>\n' % vtitle)
paginate = True
+ if divid == 'contentmain':
+ stream.write(u'<div id="contentmain">')
nav_html = UStringIO()
if paginate and not view.handle_pagination:
view.paginate(w=nav_html.write)
stream.write(nav_html.getvalue())
- if divid == 'pageContent':
- stream.write(u'<div id="contentmain">')
view.render(**kwargs)
+ stream.write(nav_html.getvalue())
+ if divid == 'contentmain':
+ stream.write(u'</div>')
extresources = self._cw.html_headers.getvalue(skiphead=True)
if extresources:
stream.write(u'<div class="ajaxHtmlHead">\n') # XXX use a widget?
stream.write(extresources)
stream.write(u'</div>\n')
- if divid == 'pageContent':
- stream.write(u'</div>%s</div>' % nav_html.getvalue())
return stream.getvalue()
--- a/cubicweb/web/views/basetemplates.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/web/views/basetemplates.py Fri Jan 12 11:02:52 2018 +0100
@@ -158,14 +158,14 @@
'etypenavigation', self._cw, rset=self.cw_rset)
if etypefilter and etypefilter.cw_propval('visible'):
etypefilter.render(w=w)
+ w(u'<div id="contentmain">\n')
nav_html = UStringIO()
if view and not view.handle_pagination:
view.paginate(w=nav_html.write)
w(nav_html.getvalue())
- w(u'<div id="contentmain">\n')
view.render(w=w)
+ w(nav_html.getvalue())
w(u'</div>\n') # close id=contentmain
- w(nav_html.getvalue())
w(u'</div>\n') # closes id=pageContent
self.template_footer(view)
--- a/cubicweb/web/views/facets.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/web/views/facets.py Fri Jan 12 11:02:52 2018 +0100
@@ -246,7 +246,7 @@
rset, vid, divid, paginate = context
else:
rset = self.cw_rset
- vid, divid = None, 'pageContent'
+ vid, divid = None, 'contentmain'
paginate = view and view.paginable
return rset, vid, divid, paginate
--- a/cubicweb/web/views/staticcontrollers.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/web/views/staticcontrollers.py Fri Jan 12 11:02:52 2018 +0100
@@ -168,7 +168,7 @@
f.write(line)
f.write(b'\n')
f.close()
- except:
+ except Exception:
os.remove(tmpfile)
raise
else:
--- a/cubicweb/wsgi/handler.py Fri Jan 12 10:56:30 2018 +0100
+++ b/cubicweb/wsgi/handler.py Fri Jan 12 11:02:52 2018 +0100
@@ -79,7 +79,7 @@
"""
def __init__(self, code, req, body=None):
text = STATUS_CODE_TEXT.get(code, 'UNKNOWN STATUS CODE')
- self.status = '%s %s' % (code, text)
+ self.status = '%d %s' % (code, text)
self.headers = list(chain(*[zip(repeat(k), v)
for k, v in req.headers_out.getAllRawHeaders()]))
self.headers = [(str(k), str(v)) for k, v in self.headers]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/changes/3.26.rst Fri Jan 12 11:02:52 2018 +0100
@@ -0,0 +1,9 @@
+3.26 (unreleased)
+=================
+
+New features
+------------
+
+* For ``pyramid`` instance configuration kind, logging is no longer handled
+ by CubicWeb; it should be configured through the ``development.ini`` file,
+ following https://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html.
--- a/doc/tutorials/advanced/part01_create-cube.rst Fri Jan 12 10:56:30 2018 +0100
+++ b/doc/tutorials/advanced/part01_create-cube.rst Fri Jan 12 11:02:52 2018 +0100
@@ -5,26 +5,35 @@
.. _adv_tuto_create_new_cube:
-Step 1: creating a new cube for my web site
+Step 1: creating a virtual environment
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+First, I need a Python virtual environment with cubicweb::
+
+ virtualenv python-2.7.5_cubicweb
+ . python-2.7.5_cubicweb/bin/activate
+ pip install cubicweb
+
+
+Step 2: creating a new cube for my web site
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
One note about my development environment: I wanted to use the packaged
-version of CubicWeb and cubes while keeping my cube in my user
-directory, let's say `~src/cubes`. I achieve this by setting the
-following environment variables::
+version of CubicWeb and cubes while keeping my cube in the current
+directory, let's say `~/src/cubes`::
- CW_CUBES_PATH=~/src/cubes
+ cd ~/src/cubes
CW_MODE=user
I can now create the cube which will hold custom code for this web
site using::
- cubicweb-ctl newcube --directory=~/src/cubes sytweb
+ cubicweb-ctl newcube sytweb
.. _adv_tuto_assemble_cubes:
-Step 2: pick building blocks into existing cubes
+Step 3: pick building blocks into existing cubes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Almost everything I want to handle in my web-site is somehow already modelized in
@@ -50,7 +59,7 @@
entities supporting the `tags` relation by linking the to `Tag` entities. This
will allows navigation into a large number of picture.
-Ok, now I'll tell my cube requires all this by editing :file:`cubes/sytweb/__pkginfo__.py`:
+Ok, now I'll tell my cube requires all this by editing :file:`cubicweb-sytweb/cubicweb_sytweb/__pkginfo__.py`:
.. sourcecode:: python
@@ -64,26 +73,20 @@
Notice that you can express minimal version of the cube that should be used,
`None` meaning whatever version available. All packages starting with 'cubicweb-'
-will be recognized as being cube, not bare python packages. You can still specify
-this explicitly using instead the `__depends_cubes__` dictionary which should
-contains cube's name without the prefix. So the example below would be written
-as:
-
- .. sourcecode:: python
-
- __depends__ = {'cubicweb': '>= 3.10.0'}
- __depends_cubes__ = {'file': '>= 1.9.0',
- 'folder': '>= 1.1.0',
- 'person': '>= 1.2.0',
- 'comment': '>= 1.2.0',
- 'tag': '>= 1.2.0',
- 'zone': None}
+will be recognized as being cubes, not bare python packages.
If your cube is packaged for debian, it's a good idea to update the
`debian/control` file at the same time, so you won't forget it.
+Now, I need to install all the dependencies::
-Step 3: glue everything together in my cube's schema
+ cd cubicweb-sytweb
+ pip install -e .
+ pip install cubicweb[etwist]
+ pip install psycopg2 # for postgresql
+
+
+Step 4: glue everything together in my cube's schema
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. sourcecode:: python
@@ -131,7 +134,7 @@
features (and goals), we won't worry about it for now and see that later when needed.
-Step 4: creating the instance
+Step 5: creating the instance
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Now that I have a schema, I want to create an instance. To
@@ -139,6 +142,8 @@
cubicweb-ctl create sytweb sytweb_instance
+Don't forget to say "yes" to the question: `Allow anonymous access ? [y/N]:`
+
Hint: if you get an error while the database is initialized, you can
avoid having to answer the questions again by running::
--- a/doc/tutorials/advanced/part02_security.rst Fri Jan 12 10:56:30 2018 +0100
+++ b/doc/tutorials/advanced/part02_security.rst Fri Jan 12 11:02:52 2018 +0100
@@ -194,7 +194,7 @@
def precommit_event(self):
for eid in self.get_data():
- entity = self.session.entity_from_eid(eid)
+ entity = self.cnx.entity_from_eid(eid)
if entity.visibility == 'parent':
entity.cw_set(visibility=u'authenticated')
@@ -364,7 +364,7 @@
.. sourcecode:: bash
- $ pytest unittest_sytweb.py
+ $ python test/unittest_sytweb.py
======================== unittest_sytweb.py ========================
-> creating tables [....................]
-> inserting default user and default groups.
@@ -382,7 +382,7 @@
.. sourcecode:: bash
- $ pytest unittest_sytweb.py
+ $ python test/unittest_sytweb.py
======================== unittest_sytweb.py ========================
.
----------------------------------------------------------------------
@@ -396,11 +396,6 @@
$ rm data/database/tmpdb*
-.. Note::
- pytest is a very convenient utility used to control test execution. It is available from the `logilab-common`_ package.
-
-.. _`logilab-common`: http://www.logilab.org/project/logilab-common
-
.. _adv_tuto_migration_script:
Step 4: writing the migration script and migrating the instance
--- a/doc/tutorials/advanced/part03_bfss.rst Fri Jan 12 10:56:30 2018 +0100
+++ b/doc/tutorials/advanced/part03_bfss.rst Fri Jan 12 11:02:52 2018 +0100
@@ -12,7 +12,7 @@
Since the function to register a custom storage needs to have a repository
instance as first argument, we've to call it in a server startup hook. So I added
-in `cubes/sytweb/hooks.py` :
+in `cubicweb_sytweb/hooks.py` :
.. sourcecode:: python
--- a/doc/tutorials/advanced/part04_ui-base.rst Fri Jan 12 10:56:30 2018 +0100
+++ b/doc/tutorials/advanced/part04_ui-base.rst Fri Jan 12 11:02:52 2018 +0100
@@ -87,7 +87,7 @@
class IndexView(startup.IndexView):
def call(self, **kwargs):
self.w(u'<div>\n')
- if self._cw.cnx.anonymous_connection:
+ if self._cw.cnx.session.anonymous_session:
self.w(u'<h4>%s</h4>\n' % self._cw._('Public Albums'))
else:
self.w(u'<h4>%s</h4>\n' % self._cw._('Albums for %s') % self._cw.user.login)
@@ -317,8 +317,8 @@
To see if everything is ok on my test instance, I do: ::
- $ cubicweb-ctl i18ninstance sytweb
- $ cubicweb-ctl start -D sytweb
+ $ cubicweb-ctl i18ninstance sytweb_instance
+ $ cubicweb-ctl start -D sytweb_instance
The first command compile i18n catalogs (e.g. generates '.mo' files) for my test
instance. The second command start it in debug mode, so I can open my browser and
--- a/flake8-ok-files.txt Fri Jan 12 10:56:30 2018 +0100
+++ b/flake8-ok-files.txt Fri Jan 12 11:02:52 2018 +0100
@@ -9,6 +9,7 @@
cubicweb/dataimport/test/test_csv.py
cubicweb/dataimport/test/test_pgstore.py
cubicweb/dataimport/test/test_massive_store.py
+cubicweb/dataimport/test/test_sqlgenstore.py
cubicweb/dataimport/test/test_stores.py
cubicweb/dataimport/test/unittest_importer.py
cubicweb/devtools/test/data/cubes/i18ntestcube/__init__.py
@@ -26,6 +27,7 @@
cubicweb/etwist/request.py
cubicweb/etwist/service.py
cubicweb/ext/__init__.py
+cubicweb/hooks/synccomputed.py
cubicweb/hooks/syncsources.py
cubicweb/hooks/test/data/hooks.py
cubicweb/hooks/test/unittest_notificationhooks.py
@@ -42,7 +44,10 @@
cubicweb/server/rqlannotation.py
cubicweb/server/schema2sql.py
cubicweb/server/session.py
+cubicweb/server/sources/__init__.py
+cubicweb/server/sources/native.py
cubicweb/server/sqlutils.py
+cubicweb/server/ssplanner.py
cubicweb/server/utils.py
cubicweb/server/test/datacomputed/migratedapp/schema.py
cubicweb/server/test/datacomputed/schema.py
@@ -56,8 +61,10 @@
cubicweb/server/test/unittest_checkintegrity.py
cubicweb/server/test/unittest_datafeed.py
cubicweb/server/test/unittest_ldapsource.py
+cubicweb/server/test/unittest_migractions.py
cubicweb/server/test/unittest_serverctl.py
cubicweb/server/test/unittest_session.py
+cubicweb/server/test/unittest_ssplanner.py
cubicweb/server/test/unittest_rqlannotation.py
cubicweb/server/test/unittest_utils.py
cubicweb/schema.py
@@ -66,8 +73,6 @@
cubicweb/sobjects/test/unittest_notification.py
cubicweb/sobjects/test/unittest_register_user.py
cubicweb/sobjects/textparsers.py
-cubicweb/sources/__init__.py
-cubicweb/sources/native.py
cubicweb/test/data/libpython/cubicweb_comment/__init__.py
cubicweb/test/data/libpython/cubicweb_comment/__pkginfo__.py
cubicweb/test/data/libpython/cubicweb_email/entities.py
@@ -95,6 +100,7 @@
cubicweb/test/unittest_mail.py
cubicweb/test/unittest_repoapi.py
cubicweb/test/unittest_req.py
+cubicweb/test/unittest_rqlrewrite.py
cubicweb/test/unittest_rtags.py
cubicweb/test/unittest_schema.py
cubicweb/test/unittest_toolsutils.py
--- a/tox.ini Fri Jan 12 10:56:30 2018 +0100
+++ b/tox.ini Fri Jan 12 11:02:52 2018 +0100
@@ -1,7 +1,7 @@
[tox]
envlist =
check-manifest,flake8,
- py{27,34}-{server,web,misc}
+ py{27,3}-{server,web,misc}
[testenv]
deps =
@@ -20,7 +20,7 @@
[testenv:flake8]
skip_install = true
deps =
- flake8 >= 3.4, < 3.5
+ flake8 >= 3.5
whitelist_externals =
/bin/sh
commands = /bin/sh -c "flake8 `xargs -a {toxinidir}/flake8-ok-files.txt`"
@@ -46,6 +46,7 @@
[pytest]
python_files = *test_*.py
+log_print = false
[flake8]
format = pylint