--- a/__init__.py Tue Oct 07 10:06:24 2014 +0200
+++ b/__init__.py Fri Oct 17 18:16:58 2014 +0200
@@ -44,10 +44,8 @@
from logilab.common.logging_ext import set_log_methods
from yams.constraints import BASE_CONVERTERS
-if os.environ.get('APYCOT_ROOT'):
- logging.basicConfig(level=logging.CRITICAL)
-else:
- logging.basicConfig()
+# pre python 2.7.2 safety
+logging.basicConfig()
from cubicweb.__pkginfo__ import version as __version__
--- a/__pkginfo__.py Tue Oct 07 10:06:24 2014 +0200
+++ b/__pkginfo__.py Fri Oct 17 18:16:58 2014 +0200
@@ -42,7 +42,7 @@
'logilab-common': '>= 0.62.0',
'logilab-mtconverter': '>= 0.8.0',
'rql': '>= 0.31.2',
- 'yams': '>= 0.39.1',
+ 'yams': '>= 0.40.0',
#gettext # for xgettext, msgcat, etc...
# web dependencies
'lxml': '',
--- a/cubicweb.spec Tue Oct 07 10:06:24 2014 +0200
+++ b/cubicweb.spec Fri Oct 17 18:16:58 2014 +0200
@@ -23,7 +23,7 @@
Requires: %{python}-logilab-common >= 0.62.0
Requires: %{python}-logilab-mtconverter >= 0.8.0
Requires: %{python}-rql >= 0.31.2
-Requires: %{python}-yams >= 0.39.1
+Requires: %{python}-yams >= 0.40.0
Requires: %{python}-logilab-database >= 1.12.1
Requires: %{python}-passlib
Requires: %{python}-lxml
--- a/cwconfig.py Tue Oct 07 10:06:24 2014 +0200
+++ b/cwconfig.py Fri Oct 17 18:16:58 2014 +0200
@@ -827,13 +827,6 @@
else:
_INSTANCES_DIR = join(_INSTALL_PREFIX, 'etc', 'cubicweb.d')
- if os.environ.get('APYCOT_ROOT'):
- _cubes_init = join(CubicWebNoAppConfiguration.CUBES_DIR, '__init__.py')
- if not exists(_cubes_init):
- file(join(_cubes_init), 'w').close()
- if not exists(_INSTANCES_DIR):
- os.makedirs(_INSTANCES_DIR)
-
# set to true during repair (shell, migration) to allow some things which
# wouldn't be possible otherwise
repairing = False
--- a/cwctl.py Tue Oct 07 10:06:24 2014 +0200
+++ b/cwctl.py Fri Oct 17 18:16:58 2014 +0200
@@ -836,6 +836,8 @@
config = cwcfg.config_for(appid)
# should not raise error if db versions don't match fs versions
config.repairing = True
+ # no need to load all appobjects and schema
+ config.quick_start = True
if hasattr(config, 'set_sources_mode'):
config.set_sources_mode(('migration',))
repo = config.migration_handler().repo_connect()
--- a/dataimport.py Tue Oct 07 10:06:24 2014 +0200
+++ b/dataimport.py Fri Oct 17 18:16:58 2014 +0200
@@ -49,12 +49,7 @@
GENERATORS.append( (gen_users, CHK) )
# create controller
- if 'cnx' in globals():
- ctl = CWImportController(RQLObjectStore(cnx))
- else:
- print 'debug mode (not connected)'
- print 'run through cubicweb-ctl shell to access an instance'
- ctl = CWImportController(ObjectStore())
+ ctl = CWImportController(RQLObjectStore(cnx))
ctl.askerror = 1
ctl.generators = GENERATORS
ctl.data['utilisateurs'] = lazytable(ucsvreader(open('users.csv')))
@@ -76,7 +71,7 @@
import inspect
from collections import defaultdict
from copy import copy
-from datetime import date, datetime
+from datetime import date, datetime, time
from time import asctime
from StringIO import StringIO
@@ -425,16 +420,87 @@
cnx.commit()
cu.close()
-def _create_copyfrom_buffer(data, columns, encoding='utf-8', replace_sep=None):
+
+def _copyfrom_buffer_convert_None(value, **opts):
+ '''Convert None value to "NULL"'''
+ return 'NULL'
+
+def _copyfrom_buffer_convert_number(value, **opts):
+ '''Convert a number into its string representation'''
+ return str(value)
+
+def _copyfrom_buffer_convert_string(value, **opts):
+ '''Convert string value.
+
+ Recognized keywords:
+ :encoding: resulting string encoding (default: utf-8)
+ :replace_sep: character used when input contains characters
+ that conflict with the column separator.
+ '''
+ encoding = opts.get('encoding','utf-8')
+ replace_sep = opts.get('replace_sep', None)
+ # Remove separators used in string formatting
+ for _char in (u'\t', u'\r', u'\n'):
+ if _char in value:
+ # If a replace_sep is given, replace
+ # the separator
+ # (and thus avoid empty buffer)
+ if replace_sep is None:
+ raise ValueError('conflicting separator: '
+ 'you must provide the replace_sep option')
+ value = value.replace(_char, replace_sep)
+ value = value.replace('\\', r'\\')
+ if isinstance(value, unicode):
+ value = value.encode(encoding)
+ return value
+
+def _copyfrom_buffer_convert_date(value, **opts):
+ '''Convert date into "YYYY-MM-DD"'''
+ # Do not use strftime, as it yields issue with date < 1900
+ # (http://bugs.python.org/issue1777412)
+ return '%04d-%02d-%02d' % (value.year, value.month, value.day)
+
+def _copyfrom_buffer_convert_datetime(value, **opts):
+ '''Convert date into "YYYY-MM-DD HH:MM:SS.UUUUUU"'''
+ # Do not use strftime, as it yields issue with date < 1900
+ # (http://bugs.python.org/issue1777412)
+ return '%s %s' % (_copyfrom_buffer_convert_date(value, **opts),
+ _copyfrom_buffer_convert_time(value, **opts))
+
+def _copyfrom_buffer_convert_time(value, **opts):
+ '''Convert time into "HH:MM:SS.UUUUUU"'''
+ return '%02d:%02d:%02d.%06d' % (value.hour, value.minute,
+ value.second, value.microsecond)
+
+# (types, converter) list.
+_COPYFROM_BUFFER_CONVERTERS = [
+ (type(None), _copyfrom_buffer_convert_None),
+ ((long, int, float), _copyfrom_buffer_convert_number),
+ (basestring, _copyfrom_buffer_convert_string),
+ (datetime, _copyfrom_buffer_convert_datetime),
+ (date, _copyfrom_buffer_convert_date),
+ (time, _copyfrom_buffer_convert_time),
+]
+
+def _create_copyfrom_buffer(data, columns=None, **convert_opts):
"""
Create a StringIO buffer for 'COPY FROM' command.
- Deals with Unicode, Int, Float, Date...
+    Deals with Unicode, Int, Float, Date... (see ``_COPYFROM_BUFFER_CONVERTERS``)
+
+    :data: a sequence of tuples, lists or dicts
+    :columns: list of columns to consider (defaults to all columns)
+    :convert_opts: keyword arguments given to converters
"""
# Create a list rather than directly create a StringIO
# to correctly write lines separated by '\n' in a single step
rows = []
- if isinstance(data[0], (tuple, list)):
- columns = range(len(data[0]))
+ if columns is None:
+ if isinstance(data[0], (tuple, list)):
+ columns = range(len(data[0]))
+ elif isinstance(data[0], dict):
+ columns = data[0].keys()
+ else:
+ raise ValueError('Could not get columns: you must provide columns.')
for row in data:
# Iterate over the different columns and the different values
# and try to convert them to a correct datatype.
@@ -444,43 +510,19 @@
try:
value = row[col]
except KeyError:
- warnings.warn(u"Column %s is not accessible in row %s"
+ warnings.warn(u"Column %s is not accessible in row %s"
% (col, row), RuntimeWarning)
- # XXX 'value' set to None so that the import does not end in
- # error.
- # Instead, the extra keys are set to NULL from the
+ # XXX 'value' set to None so that the import does not end in
+ # error.
+ # Instead, the extra keys are set to NULL from the
# database point of view.
value = None
- if value is None:
- value = 'NULL'
- elif isinstance(value, (long, int, float)):
- value = str(value)
- elif isinstance(value, (str, unicode)):
- # Remove separators used in string formatting
- for _char in (u'\t', u'\r', u'\n'):
- if _char in value:
- # If a replace_sep is given, replace
- # the separator instead of returning None
- # (and thus avoid empty buffer)
- if replace_sep:
- value = value.replace(_char, replace_sep)
- else:
- return
- value = value.replace('\\', r'\\')
- if value is None:
- return
- if isinstance(value, unicode):
- value = value.encode(encoding)
- elif isinstance(value, (date, datetime)):
- value = '%04d-%02d-%02d' % (value.year,
- value.month,
- value.day)
- if isinstance(value, datetime):
- value += ' %02d:%02d:%02d' % (value.hour,
- value.minutes,
- value.second)
+ for types, converter in _COPYFROM_BUFFER_CONVERTERS:
+ if isinstance(value, types):
+ value = converter(value, **convert_opts)
+ break
else:
- return None
+ raise ValueError("Unsupported value type %s" % type(value))
# We push the value to the new formatted row
# if the value is not None and could be converted to a string.
formatted_row.append(value)
@@ -506,16 +548,11 @@
self.types = {}
self.relations = set()
self.indexes = {}
- self._rql = None
- self._commit = None
-
- def _put(self, type, item):
- self.items.append(item)
- return len(self.items) - 1
def create_entity(self, etype, **data):
data = attrdict(data)
- data['eid'] = eid = self._put(etype, data)
+ data['eid'] = eid = len(self.items)
+ self.items.append(data)
self.eids[eid] = data
self.types.setdefault(etype, []).append(eid)
return data
@@ -534,32 +571,12 @@
return relation
def commit(self):
- """this commit method do nothing by default
-
- This is voluntary to use the frequent autocommit feature in CubicWeb
- when you are using hooks or another
-
- If you want override commit method, please set it by the
- constructor
- """
- pass
+ """this commit method does nothing by default"""
+ return
def flush(self):
- """The method is provided so that all stores share a common API.
- It just tries to call the commit method.
- """
- print 'starting flush'
- try:
- self.commit()
- except:
- print 'failed to flush'
- else:
- print 'flush done'
-
- def rql(self, *args):
- if self._rql is not None:
- return self._rql(*args)
- return []
+ """The method is provided so that all stores share a common API"""
+ pass
@property
def nb_inserted_entities(self):
@@ -573,62 +590,47 @@
class RQLObjectStore(ObjectStore):
"""ObjectStore that works with an actual RQL repository (production mode)"""
- _rql = None # bw compat
- def __init__(self, session=None, commit=None):
- ObjectStore.__init__(self)
- if session is None:
- sys.exit('please provide a session of run this script with cubicweb-ctl shell and pass cnx as session')
- if not hasattr(session, 'set_cnxset'):
- if hasattr(session, 'request'):
- # connection object
- cnx = session
- session = session.request()
- else: # object is already a request
- cnx = session.cnx
- session.set_cnxset = lambda : None
- commit = commit or cnx.commit
- else:
- session.set_cnxset()
- self.session = session
- self._commit = commit or session.commit
+ def __init__(self, cnx, commit=None):
+ if commit is not None:
+ warnings.warn('[3.19] commit argument should not be specified '
+ 'as the cnx object already provides it.',
+ DeprecationWarning, stacklevel=2)
+ super(RQLObjectStore, self).__init__()
+ self._cnx = cnx
+ self._commit = commit or cnx.commit
def commit(self):
- txuuid = self._commit()
- self.session.set_cnxset()
- return txuuid
+ return self._commit()
def rql(self, *args):
- if self._rql is not None:
- return self._rql(*args)
- return self.session.execute(*args)
+ return self._cnx.execute(*args)
+
+ @property
+ def session(self):
+ warnings.warn('[3.19] deprecated property.', DeprecationWarning,
+ stacklevel=2)
+ return self._cnx.repo._get_session(self._cnx.sessionid)
def create_entity(self, *args, **kwargs):
- entity = self.session.create_entity(*args, **kwargs)
+ entity = self._cnx.create_entity(*args, **kwargs)
self.eids[entity.eid] = entity
self.types.setdefault(args[0], []).append(entity.eid)
return entity
- def _put(self, type, item):
- query = 'INSERT %s X' % type
- if item:
- query += ': ' + ', '.join('X %s %%(%s)s' % (k, k)
- for k in item)
- return self.rql(query, item)[0][0]
-
def relate(self, eid_from, rtype, eid_to, **kwargs):
eid_from, rtype, eid_to = super(RQLObjectStore, self).relate(
eid_from, rtype, eid_to, **kwargs)
self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
{'x': int(eid_from), 'y': int(eid_to)})
- @deprecated("[3.19] use session.find(*args, **kwargs).entities() instead")
+ @deprecated("[3.19] use cnx.find(*args, **kwargs).entities() instead")
def find_entities(self, *args, **kwargs):
- return self.session.find(*args, **kwargs).entities()
+ return self._cnx.find(*args, **kwargs).entities()
- @deprecated("[3.19] use session.find(*args, **kwargs).one() instead")
+ @deprecated("[3.19] use cnx.find(*args, **kwargs).one() instead")
def find_one_entity(self, *args, **kwargs):
- return self.session.find(*args, **kwargs).one()
+ return self._cnx.find(*args, **kwargs).one()
# the import controller ########################################################
@@ -755,7 +757,6 @@
class NoHookRQLObjectStore(RQLObjectStore):
"""ObjectStore that works with an actual RQL repository (production mode)"""
- _rql = None # bw compat
def __init__(self, session, metagen=None, baseurl=None):
super(NoHookRQLObjectStore, self).__init__(session)
@@ -768,7 +769,6 @@
self._nb_inserted_entities = 0
self._nb_inserted_types = 0
self._nb_inserted_relations = 0
- self.rql = session.execute
# deactivate security
session.read_security = False
session.write_security = False
@@ -821,9 +821,6 @@
def nb_inserted_relations(self):
return self._nb_inserted_relations
- def _put(self, type, item):
- raise RuntimeError('use create entity')
-
class MetaGenerator(object):
META_RELATIONS = (META_RTYPES
@@ -1056,10 +1053,6 @@
nb_threads=self.nb_threads_statement,
support_copy_from=self.support_copy_from,
encoding=self.dbencoding)
- except:
- print 'failed to flush'
- else:
- print 'flush done'
finally:
_entities_sql.clear()
_relations_sql.clear()
--- a/debian/control Tue Oct 07 10:06:24 2014 +0200
+++ b/debian/control Fri Oct 17 18:16:58 2014 +0200
@@ -15,7 +15,7 @@
python-unittest2 | python (>= 2.7),
python-logilab-mtconverter,
python-rql,
- python-yams (>= 0.39.1),
+ python-yams (>= 0.40.0),
python-lxml,
Standards-Version: 3.9.1
Homepage: http://www.cubicweb.org
@@ -152,7 +152,7 @@
gettext,
python-logilab-mtconverter (>= 0.8.0),
python-logilab-common (>= 0.62.0),
- python-yams (>= 0.39.1),
+ python-yams (>= 0.40.0),
python-rql (>= 0.31.2),
python-lxml
Recommends:
--- a/doc/book/en/devrepo/datamodel/definition.rst Tue Oct 07 10:06:24 2014 +0200
+++ b/doc/book/en/devrepo/datamodel/definition.rst Fri Oct 17 18:16:58 2014 +0200
@@ -1,4 +1,4 @@
- .. -*- coding: utf-8 -*-
+.. -*- coding: utf-8 -*-
.. _datamodel_definition:
@@ -523,6 +523,202 @@
.. _yams_example:
+
+Derived attributes and relations
+--------------------------------
+
+.. note:: **TODO** Check organisation of the whole chapter of the documentation
+
+Cubicweb offers the possibility to *query* data using so called
+*computed* relations and attributes. Those are *seen* by RQL requests
+as normal attributes and relations but are actually derived from other
+attributes and relations. In a first section we'll informally review
+two typical use cases. Then we see how to use computed attributes and
+relations in your schema. Last we will consider various significant
+aspects of their implementation and the impact on their usage.
+
+Motivating use cases
+~~~~~~~~~~~~~~~~~~~~
+
+Computed (or reified) relations
+```````````````````````````````
+
+It often arises that one must represent a ternary relation, or a
+family of relations. For example, in the context of an exhibition
+catalog you might want to link all *contributors* to the *work* they
+contributed to, but this contribution can be as *illustrator*,
+*author*, *performer*, ...
+
+The classical way to describe this kind of information within an
+entity-relationship schema is to *reify* the relation, that is turn
+the relation into an entity. In our example the schema will have a
+*Contribution* entity type used to represent the family of the
+contribution relations.
+
+
+.. sourcecode:: python
+
+ class ArtWork(EntityType):
+ name = String()
+ ...
+
+ class Person(EntityType):
+ name = String()
+ ...
+
+ class Contribution(EntityType):
+ contributor = SubjectRelation('Person', cardinality='1*', inlined=True)
+ manifestation = SubjectRelation('ArtWork')
+ role = SubjectRelation('Role')
+
+ class Role(EntityType):
+ name = String()
+
+But then, in order to query the illustrator(s) ``I`` of a work ``W``,
+one has to write::
+
+ Any I, W WHERE C is Contribution, C contributor I, C manifestation W,
+ C role R, R name 'illustrator'
+
+whereas we would like to be able to simply write::
+
+ Any I, W WHERE I illustrator_of W
+
+This is precisely what the computed relations allow.
+
+
+Computed (or synthesized) attribute
+```````````````````````````````````
+
+Assuming a trivial schema for describing employees in companies, one
+can be interested in the total of salaries paid by a company for
+all its employees. One has to write::
+
+    Any C, SUM(SA) GROUPBY C WHERE E works_for C, E salary SA
+
+whereas it would be most convenient to simply write::
+
+ Any C, TS WHERE C total_salary TS
+
+And this is again what computed attributes provide.
+
+
+Using computed attributes and relations
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Computed (or reified) relations
+```````````````````````````````
+
+In the above case we would define the *computed relation*
+``illustrator_of`` in the schema by:
+
+.. sourcecode:: python
+
+    class illustrator_of(ComputedRelation):
+ rule = ('C is Contribution, C contributor S, C manifestation O,'
+ 'C role R, R name "illustrator"')
+
+You will note that:
+
+* the ``S`` and ``O`` RQL variables implicitly identify the subject and
+ object of the defined computed relation, akin to what happens in
+ RRQLExpression
+* the possible subject and object entity types are inferred from the rule;
+* computed relation definitions always have empty *add* and *delete* permissions
+* *read* permissions can be defined, permissions from the relations used in the
+ rewrite rule **are not considered** ;
+* nothing else may be defined on the `ComputedRelation` subclass beside
+  description, permissions and rule (e.g. no cardinality, composite, etc.).
+ `BadSchemaDefinition` is raised on attempt to specify other attributes;
+* computed relations can not be used in 'SET' and 'DELETE' rql queries
+ (`BadQuery` exception raised).
+
+
+NB: The fact that the *add* and *delete* permissions are *empty* even
+for managers is expected to make the automatic UI not attempt to edit
+them.
+
+Computed (or synthesized) attributes
+````````````````````````````````````
+
+In the above case we would define the *computed attribute*
+``total_salary`` on the ``Company`` entity type in the schema by:
+
+.. sourcecode:: python
+
+ class Company(EntityType):
+ name = String()
+        total_salary = Int(formula='Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA')
+
+* the ``X`` RQL variable implicitly identifies the entity holding the
+ computed attribute, akin to what happens in ERQLExpression;
+* the type inferred from the formula is checked against the declared type, and
+ `BadSchemaDefinition` is raised if they don't match;
+* the computed attributes always have empty *update* permissions
+* `BadSchemaDefinition` is raised on attempt to set 'update' permissions;
+* 'read' permissions can be defined, permissions regarding the formula
+ **are not considered**;
+* other attribute properties (inlined, ...) can be defined as for normal attributes;
+* Similarly to computed relation, computed attribute can't be used in 'SET' and
+ 'DELETE' rql queries (`BadQuery` exception raised).
+
+
+API and implementation
+~~~~~~~~~~~~~~~~~~~~~~
+
+Representation in the data backend
+``````````````````````````````````
+
+Computed relations have no direct representation at the SQL table
+level. Instead, each time a query is issued the query is rewritten to
+replace the computed relation by its equivalent definition and the
+resulting rewritten query is performed in the usual way.
+
+On the contrary, computed attributes are represented as a column in the
+table for their host entity type, just like normal attributes. Their
+value is kept up-to-date with respect to their definition by a system
+of hooks (also called triggers in most RDBMS) which recomputes them
+when the relations and attributes they depend on are modified.
+
+Yams API
+````````
+
+When accessing the schema through the *yams API* (not when defining a
+schema in a ``schema.py`` file) the computed attributes and relations
+are represented as follows:
+
+relations
+ The ``yams.RelationSchema`` class has a new ``rule`` attribute
+ holding the rule as a string. If this attribute is set all others
+ must not be set.
+attributes
+ A new property ``formula`` is added on class
+  ``yams.RelationDefinitionSchema`` along with a new keyword
+ argument ``formula`` on the initializer.
+
+Migration
+`````````
+
+The migrations are to be handled as summarized in the table below.
+
++------------+---------------------------------------------------+---------------------------------------+
+| | Computed rtype | Computed attribute |
++============+===================================================+=======================================+
+| add | * add_relation_type | * add_attribute |
+| | * add_relation_definition should trigger an error | * add_relation_definition |
++------------+---------------------------------------------------+---------------------------------------+
+| modify | * sync_schema_prop_perms: | * sync_schema_prop_perms: |
+| | checks the rule is | |
+| (rule or | synchronized with the database | - empty the cache, |
+| formula) | | - check formula, |
+| | | - make sure all the values get |
+| | | updated |
++------------+---------------------------------------------------+---------------------------------------+
+| del | * drop_relation_type | * drop_attribute |
+| | * drop_relation_definition should trigger an error| * drop_relation_definition |
++------------+---------------------------------------------------+---------------------------------------+
+
+
Defining your schema using yams
-------------------------------
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hooks/synccomputed.py Fri Oct 17 18:16:58 2014 +0200
@@ -0,0 +1,227 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""Hooks for synchronizing computed attributes"""
+
+__docformat__ = "restructuredtext en"
+_ = unicode
+
+from collections import defaultdict
+
+from rql import nodes
+
+from cubicweb.server import hook
+
+
+class RecomputeAttributeOperation(hook.DataOperationMixIn, hook.Operation):
+ """Operation to recompute caches of computed attribute at commit time,
+ depending on what's have been modified in the transaction and avoiding to
+ recompute twice the same attribute
+ """
+ containercls = dict
+ def add_data(self, computed_attribute, eid=None):
+ try:
+ self._container[computed_attribute].add(eid)
+ except KeyError:
+ self._container[computed_attribute] = set((eid,))
+
+ def precommit_event(self):
+ for computed_attribute_rdef, eids in self.get_data().iteritems():
+ attr = computed_attribute_rdef.rtype
+ formula = computed_attribute_rdef.formula
+ rql = formula.replace('Any ', 'Any X, ', 1)
+ kwargs = None
+ # add constraint on X to the formula
+ if None in eids : # recompute for all etype if None is found
+ rql += ', X is %s' % computed_attribute_rdef.subject
+ elif len(eids) == 1:
+ rql += ', X eid %(x)s'
+ kwargs = {'x': eids.pop()}
+ else:
+ rql += ', X eid IN (%s)' % ', '.join((str(eid) for eid in eids))
+ update_rql = 'SET X %s %%(value)s WHERE X eid %%(x)s' % attr
+ for eid, value in self.cnx.execute(rql, kwargs):
+ self.cnx.execute(update_rql, {'value': value, 'x': eid})
+
+
+class EntityWithCACreatedHook(hook.Hook):
+ """When creating an entity that has some computed attribute, those
+ attributes have to be computed.
+
+    Concrete classes of this hook are generated at registration time by
+ introspecting the schema.
+ """
+ __abstract__ = True
+ events = ('after_add_entity',)
+ # list of computed attribute rdefs that have to be recomputed
+ computed_attributes = None
+
+ def __call__(self):
+ for rdef in self.computed_attributes:
+ RecomputeAttributeOperation.get_instance(self._cw).add_data(
+ rdef, self.entity.eid)
+
+
+class RelationInvolvedInCAModifiedHook(hook.Hook):
+ """When some relation used in a computed attribute is updated, those
+ attributes have to be recomputed.
+
+    Concrete classes of this hook are generated at registration time by
+ introspecting the schema.
+ """
+ __abstract__ = True
+ events = ('after_add_relation', 'before_delete_relation')
+ # list of (computed attribute rdef, optimize_on) that have to be recomputed
+ optimized_computed_attributes = None
+
+ def __call__(self):
+ for rdef, optimize_on in self.optimized_computed_attributes:
+ if optimize_on is None:
+ eid = None
+ else:
+ eid = getattr(self, optimize_on)
+ RecomputeAttributeOperation.get_instance(self._cw).add_data(rdef, eid)
+
+
+class AttributeInvolvedInCAModifiedHook(hook.Hook):
+ """When some attribute used in a computed attribute is updated, those
+ attributes have to be recomputed.
+
+    Concrete classes of this hook are generated at registration time by
+ introspecting the schema.
+ """
+ __abstract__ = True
+ events = ('after_update_entity',)
+ # list of (computed attribute rdef, attributes of this entity type involved)
+ # that may have to be recomputed
+ attributes_computed_attributes = None
+
+ def __call__(self):
+ edited_attributes = frozenset(self.entity.cw_edited)
+ for rdef, used_attributes in self.attributes_computed_attributes.iteritems():
+ if edited_attributes.intersection(used_attributes):
+ # XXX optimize if the modified attributes belong to the same
+ # entity as the computed attribute
+ RecomputeAttributeOperation.get_instance(self._cw).add_data(rdef)
+
+
+# code generation at registration time #########################################
+
+def _optimize_on(formula_select, rtype):
+ """Given a formula and some rtype, tells whether on update of the given
+    relation, formula may be recomputed only for the relation's subject
+ ('eidfrom' returned), object ('eidto' returned) or None.
+
+ Optimizing is only possible when X is used as direct subject/object of this
+ relation, else we may miss some necessary update.
+ """
+ for rel in formula_select.get_nodes(nodes.Relation):
+ if rel.r_type == rtype:
+ sub = rel.get_variable_parts()[0]
+ obj = rel.get_variable_parts()[1]
+ if sub.name == 'X':
+ return 'eidfrom'
+ elif obj.name == 'X':
+ return 'eidto'
+ else:
+ return None
+
+
+class _FormulaDependenciesMatrix(object):
+ """This class computes and represents the dependencies of computed attributes
+ towards relations and attributes
+ """
+
+ def __init__(self, schema):
+ """Analyzes the schema to compute the dependencies"""
+ # entity types holding some computed attribute {etype: [computed rdefs]}
+ self.computed_attribute_by_etype = defaultdict(list)
+ # depending entity types {dep. etype: {computed rdef: dep. etype attributes}}
+ self.computed_attribute_by_etype_attrs = defaultdict(lambda: defaultdict(set))
+ # depending relations def {dep. rdef: [computed rdefs]
+ self.computed_attribute_by_relation = defaultdict(list) # by rdef
+ # Walk through all attributes definitions
+ for rdef in schema.iter_computed_attributes():
+ self.computed_attribute_by_etype[rdef.subject.type].append(rdef)
+ # extract the relations it depends upon - `rdef.formula_select` is
+ # expected to have been set by finalize_computed_attributes
+ select = rdef.formula_select
+ for rel_node in select.get_nodes(nodes.Relation):
+ rschema = schema.rschema(rel_node.r_type)
+ lhs, rhs = rel_node.get_variable_parts()
+ for sol in select.solutions:
+ subject_etype = sol[lhs.name]
+ if isinstance(rhs, nodes.VariableRef):
+ object_etypes = set(sol[rhs.name] for sol in select.solutions)
+ else:
+ object_etypes = rschema.objects(subject_etype)
+ for object_etype in object_etypes:
+ if rschema.final:
+ attr_for_computations = self.computed_attribute_by_etype_attrs[subject_etype]
+ attr_for_computations[rdef].add(rschema.type)
+ else:
+ depend_on_rdef = rschema.rdefs[subject_etype, object_etype]
+ self.computed_attribute_by_relation[depend_on_rdef].append(rdef)
+
+ def generate_entity_creation_hooks(self):
+ for etype, computed_attributes in self.computed_attribute_by_etype.iteritems():
+ regid = 'computed_attribute.%s_created' % etype
+ selector = hook.is_instance(etype)
+ yield type('%sCreatedHook' % etype,
+ (EntityWithCACreatedHook,),
+ {'__regid__': regid,
+ '__select__': hook.Hook.__select__ & selector,
+ 'computed_attributes': computed_attributes})
+
+ def generate_relation_change_hooks(self):
+ for rdef, computed_attributes in self.computed_attribute_by_relation.iteritems():
+ regid = 'computed_attribute.%s_modified' % rdef.rtype
+ selector = hook.match_rtype(rdef.rtype.type,
+ frometypes=(rdef.subject.type,),
+ toetypes=(rdef.object.type,))
+ optimized_computed_attributes = []
+ for computed_rdef in computed_attributes:
+ optimized_computed_attributes.append(
+ (computed_rdef,
+ _optimize_on(computed_rdef.formula_select, rdef.rtype))
+ )
+ yield type('%sModifiedHook' % rdef.rtype,
+ (RelationInvolvedInCAModifiedHook,),
+ {'__regid__': regid,
+ '__select__': hook.Hook.__select__ & selector,
+ 'optimized_computed_attributes': optimized_computed_attributes})
+
+ def generate_entity_update_hooks(self):
+ for etype, attributes_computed_attributes in self.computed_attribute_by_etype_attrs.iteritems():
+ regid = 'computed_attribute.%s_updated' % etype
+ selector = hook.is_instance(etype)
+ yield type('%sModifiedHook' % etype,
+ (AttributeInvolvedInCAModifiedHook,),
+ {'__regid__': regid,
+ '__select__': hook.Hook.__select__ & selector,
+ 'attributes_computed_attributes': attributes_computed_attributes})
+
+
+def registration_callback(vreg):
+ vreg.register_all(globals().values(), __name__)
+ dependencies = _FormulaDependenciesMatrix(vreg.schema)
+ for hook_class in dependencies.generate_entity_creation_hooks():
+ vreg.register(hook_class)
+ for hook_class in dependencies.generate_relation_change_hooks():
+ vreg.register(hook_class)
+ for hook_class in dependencies.generate_entity_update_hooks():
+ vreg.register(hook_class)
--- a/hooks/syncschema.py Tue Oct 07 10:06:24 2014 +0200
+++ b/hooks/syncschema.py Fri Oct 17 18:16:58 2014 +0200
@@ -27,7 +27,8 @@
_ = unicode
from copy import copy
-from yams.schema import BASE_TYPES, RelationSchema, RelationDefinitionSchema
+from yams.schema import (BASE_TYPES, BadSchemaDefinition,
+ RelationSchema, RelationDefinitionSchema)
from yams import buildobjs as ybo, schema2sql as y2sql, convert_default_value
from logilab.common.decorators import clear_cache
@@ -38,6 +39,7 @@
CONSTRAINTS, ETYPE_NAME_MAP, display_name)
from cubicweb.server import hook, schemaserial as ss
from cubicweb.server.sqlutils import SQL_PREFIX
+from cubicweb.hooks.synccomputed import RecomputeAttributeOperation
# core entity and relation types which can't be removed
CORE_TYPES = BASE_TYPES | SCHEMA_TYPES | META_RTYPES | set(
@@ -70,14 +72,14 @@
table = SQL_PREFIX + etype
column = SQL_PREFIX + rtype
try:
- cnx.system_sql(str('ALTER TABLE %s ADD %s integer'
- % (table, column)), rollback_on_failure=False)
+ cnx.system_sql(str('ALTER TABLE %s ADD %s integer' % (table, column)),
+ rollback_on_failure=False)
cnx.info('added column %s to table %s', column, table)
except Exception:
# silent exception here, if this error has not been raised because the
# column already exists, index creation will fail anyway
cnx.exception('error while adding column %s to table %s',
- table, column)
+ table, column)
# create index before alter table which may expectingly fail during test
# (sqlite) while index creation should never fail (test for index existence
# is done by the dbhelper)
@@ -166,8 +168,8 @@
# drop index if any
source.drop_index(cnx, table, column)
if source.dbhelper.alter_column_support:
- cnx.system_sql('ALTER TABLE %s DROP COLUMN %s'
- % (table, column), rollback_on_failure=False)
+ cnx.system_sql('ALTER TABLE %s DROP COLUMN %s' % (table, column),
+ rollback_on_failure=False)
self.info('dropped column %s from table %s', column, table)
else:
# not supported by sqlite for instance
@@ -307,7 +309,7 @@
class CWRTypeUpdateOp(MemSchemaOperation):
"""actually update some properties of a relation definition"""
rschema = entity = values = None # make pylint happy
- oldvalus = None
+ oldvalues = None
def precommit_event(self):
rschema = self.rschema
@@ -388,6 +390,21 @@
# XXX revert changes on database
+class CWComputedRTypeUpdateOp(MemSchemaOperation):
+ """actually update the rule of a computed relation type"""
+ rschema = entity = rule = None # make pylint happy
+ old_rule = None
+
+ def precommit_event(self):
+ # update the in-memory schema first
+ self.old_rule = self.rschema.rule
+ self.rschema.rule = self.rule
+
+ def revertprecommit_event(self):
+ # revert changes on in memory schema
+ self.rschema.rule = self.old_rule
+
+
class CWAttributeAddOp(MemSchemaOperation):
"""an attribute relation (CWAttribute) has been added:
* add the necessary column
@@ -407,12 +424,19 @@
description=entity.description, cardinality=entity.cardinality,
constraints=get_constraints(self.cnx, entity),
order=entity.ordernum, eid=entity.eid, **kwargs)
- self.cnx.vreg.schema.add_relation_def(rdefdef)
+ try:
+ self.cnx.vreg.schema.add_relation_def(rdefdef)
+ except BadSchemaDefinition:
+ # rdef has been inferred then explicitly added (the current consensus
+ # is not clear at all on inferred relation handling, and it is much
+ # probably buggy)
+ rdef = self.cnx.vreg.schema.rschema(rdefdef.name).rdefs[rdefdef.subject, rdefdef.object]
+ assert rdef.infered
self.cnx.execute('SET X ordernum Y+1 '
- 'WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, '
- 'X ordernum >= %(order)s, NOT X eid %(x)s',
- {'x': entity.eid, 'se': fromentity.eid,
- 'order': entity.ordernum or 0})
+ 'WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, '
+ 'X ordernum >= %(order)s, NOT X eid %(x)s',
+ {'x': entity.eid, 'se': fromentity.eid,
+ 'order': entity.ordernum or 0})
return rdefdef
def precommit_event(self):
@@ -425,6 +449,7 @@
default = default.unzpickle()
props = {'default': default,
'indexed': entity.indexed,
+ 'formula': entity.formula,
'fulltextindexed': entity.fulltextindexed,
'internationalizable': entity.internationalizable}
# update the in-memory schema first
@@ -447,8 +472,8 @@
column = SQL_PREFIX + rdefdef.name
try:
cnx.system_sql(str('ALTER TABLE %s ADD %s %s'
- % (table, column, attrtype)),
- rollback_on_failure=False)
+ % (table, column, attrtype)),
+ rollback_on_failure=False)
self.info('added column %s to table %s', table, column)
except Exception as ex:
# the column probably already exists. this occurs when
@@ -479,6 +504,12 @@
default = convert_default_value(self.rdefdef, default)
cnx.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column),
{'default': default})
+ # if attribute is computed, compute it
+ if entity.formula:
+ # add rtype attribute for RelationDefinitionSchema api compat, this
+ # is what RecomputeAttributeOperation expect
+ rdefdef.rtype = rdefdef.name
+ RecomputeAttributeOperation.get_instance(cnx).add_data(rdefdef)
def revertprecommit_event(self):
# revert changes on in memory schema
@@ -616,6 +647,8 @@
self.null_allowed_changed = True
if 'fulltextindexed' in self.values:
UpdateFTIndexOp.get_instance(cnx).add_data(rdef.subject)
+ if 'formula' in self.values:
+ RecomputeAttributeOperation.get_instance(cnx).add_data(rdef)
def revertprecommit_event(self):
if self.rdef is None:
@@ -977,7 +1010,26 @@
MemSchemaCWRTypeDel(self._cw, rtype=name)
-class AfterAddCWRTypeHook(DelCWRTypeHook):
+class AfterAddCWComputedRTypeHook(SyncSchemaHook):
+ """after a CWComputedRType entity has been added:
+ * register an operation to add the relation type to the instance's
+ schema on commit
+
+ We don't know yet at this point if a table is necessary
+ """
+ __regid__ = 'syncaddcwcomputedrtype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType')
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ rtypedef = ybo.ComputedRelation(name=entity.name,
+ eid=entity.eid,
+ rule=entity.rule)
+ MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef)
+
+
+class AfterAddCWRTypeHook(SyncSchemaHook):
"""after a CWRType entity has been added:
* register an operation to add the relation type to the instance's
schema on commit
@@ -985,6 +1037,7 @@
We don't know yet this point if a table is necessary
"""
__regid__ = 'syncaddcwrtype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWRType')
events = ('after_add_entity',)
def __call__(self):
@@ -997,9 +1050,10 @@
MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef)
-class BeforeUpdateCWRTypeHook(DelCWRTypeHook):
+class BeforeUpdateCWRTypeHook(SyncSchemaHook):
"""check name change, handle final"""
__regid__ = 'syncupdatecwrtype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWRType')
events = ('before_update_entity',)
def __call__(self):
@@ -1017,6 +1071,23 @@
values=newvalues)
+class BeforeUpdateCWComputedRTypeHook(SyncSchemaHook):
+ """check name change, handle rule modification"""
+ __regid__ = 'syncupdatecwcomputedrtype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType')
+ events = ('before_update_entity',)
+
+ def __call__(self):
+ entity = self.entity
+ check_valid_changes(self._cw, entity)
+ if 'rule' in entity.cw_edited:
+ old, new = entity.cw_edited.oldnewvalue('rule')
+ if old != new:
+ rschema = self._cw.vreg.schema.rschema(entity.name)
+ CWComputedRTypeUpdateOp(self._cw, rschema=rschema,
+ entity=entity, rule=new)
+
+
class AfterDelRelationTypeHook(SyncSchemaHook):
"""before deleting a CWAttribute or CWRelation entity:
* if this is a final or inlined relation definition, instantiate an
@@ -1053,6 +1124,24 @@
RDefDelOp(cnx, rdef=rdef)
+# CWComputedRType hooks #######################################################
+
+class DelCWComputedRTypeHook(SyncSchemaHook):
+ """before deleting a CWComputedRType entity:
+ * check that we don't remove a core relation type
+ * instantiate an operation to delete the relation type on commit
+ """
+ __regid__ = 'syncdelcwcomputedrtype'
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType')
+ events = ('before_delete_entity',)
+
+ def __call__(self):
+ name = self.entity.name
+ if name in CORE_TYPES:
+ raise validation_error(self.entity, {None: _("can't be deleted")})
+ MemSchemaCWRTypeDel(self._cw, rtype=name)
+
+
# CWAttribute / CWRelation hooks ###############################################
class AfterAddCWAttributeHook(SyncSchemaHook):
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hooks/test/data-computed/schema.py Fri Oct 17 18:16:58 2014 +0200
@@ -0,0 +1,31 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from yams.buildobjs import EntityType, String, Int, SubjectRelation
+
+THISYEAR = 2014
+
+class Person(EntityType):
+ name = String()
+ salaire = Int()
+ birth_year = Int(required=True)
+ travaille = SubjectRelation('Societe')
+ age = Int(formula='Any %d - D WHERE X birth_year D' % THISYEAR)
+
+class Societe(EntityType):
+ nom = String()
+ salaire_total = Int(formula='Any SUM(SA) GROUPBY X WHERE P travaille X, P salaire SA')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hooks/test/unittest_synccomputed.py Fri Oct 17 18:16:58 2014 +0200
@@ -0,0 +1,139 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""unit tests for computed attributes/relations hooks"""
+
+from unittest import TestCase
+
+from yams.buildobjs import EntityType, String, Int, SubjectRelation
+
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.schema import build_schema_from_namespace
+
+
+class FormulaDependenciesMatrixTC(TestCase):
+
+ def simple_schema(self):
+ THISYEAR = 2014
+
+ class Person(EntityType):
+ name = String()
+ salary = Int()
+ birth_year = Int(required=True)
+ works_for = SubjectRelation('Company')
+ age = Int(formula='Any %d - D WHERE X birth_year D' % THISYEAR)
+
+ class Company(EntityType):
+ name = String()
+ total_salary = Int(formula='Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA')
+
+ schema = build_schema_from_namespace(vars().items())
+ return schema
+
+ def setUp(self):
+ from cubicweb.hooks.synccomputed import _FormulaDependenciesMatrix
+ self.schema = self.simple_schema()
+ self.dependencies = _FormulaDependenciesMatrix(self.schema)
+
+ def test_computed_attributes_by_etype(self):
+ comp_by_etype = self.dependencies.computed_attribute_by_etype
+ self.assertEqual(len(comp_by_etype), 2)
+ values = comp_by_etype['Person']
+ self.assertEqual(len(values), 1)
+ self.assertEqual(values[0].rtype, 'age')
+ values = comp_by_etype['Company']
+ self.assertEqual(len(values), 1)
+ self.assertEqual(values[0].rtype, 'total_salary')
+
+ def test_computed_attribute_by_relation(self):
+ comp_by_rdef = self.dependencies.computed_attribute_by_relation
+ self.assertEqual(len(comp_by_rdef), 1)
+ key, values = iter(comp_by_rdef.iteritems()).next()
+ self.assertEqual(key.rtype, 'works_for')
+ self.assertEqual(len(values), 1)
+ self.assertEqual(values[0].rtype, 'total_salary')
+
+ def test_computed_attribute_by_etype_attrs(self):
+ comp_by_attr = self.dependencies.computed_attribute_by_etype_attrs
+ self.assertEqual(len(comp_by_attr), 1)
+ values = comp_by_attr['Person']
+ self.assertEqual(len(values), 2)
+ values = set((rdef.formula, tuple(v))
+ for rdef, v in values.iteritems())
+ self.assertEquals(values,
+ set((('Any 2014 - D WHERE X birth_year D', tuple(('birth_year',))),
+ ('Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA', tuple(('salary',)))))
+ )
+
+
+class ComputedAttributeTC(CubicWebTC):
+ appid = 'data-computed'
+
+ def setup_entities(self, req):
+ self.societe = req.create_entity('Societe', nom=u'Foo')
+ req.create_entity('Person', name=u'Titi', salaire=1000,
+ travaille=self.societe, birth_year=2001)
+ self.tata = req.create_entity('Person', name=u'Tata', salaire=2000,
+ travaille=self.societe, birth_year=1990)
+
+
+ def test_update_on_add_remove_relation(self):
+ """check the rewriting of a computed attribute"""
+ with self.admin_access.web_request() as req:
+ self.setup_entities(req)
+ req.cnx.commit()
+ rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"')
+ self.assertEqual(rset[0][0], 3000)
+ # Add relation.
+ toto = req.create_entity('Person', name=u'Toto', salaire=1500,
+ travaille=self.societe, birth_year=1988)
+ req.cnx.commit()
+ rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"')
+ self.assertEqual(rset[0][0], 4500)
+ # Delete relation.
+ toto.cw_set(travaille=None)
+ req.cnx.commit()
+ rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"')
+ self.assertEqual(rset[0][0], 3000)
+
+ def test_recompute_on_attribute_update(self):
+ """check the modification of an attribute triggers the update of the
+ computed attributes that depend on it"""
+ with self.admin_access.web_request() as req:
+ self.setup_entities(req)
+ req.cnx.commit()
+ rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"')
+ self.assertEqual(rset[0][0], 3000)
+ # Update attribute.
+ self.tata.cw_set(salaire=1000)
+ req.cnx.commit()
+ rset = req.execute('Any S WHERE X salaire_total S, X nom "Foo"')
+ self.assertEqual(rset[0][0], 2000)
+
+ def test_init_on_entity_creation(self):
+ """check the computed attribute is initialized on entity creation"""
+ with self.admin_access.web_request() as req:
+ p = req.create_entity('Person', name=u'Tata', salaire=2000,
+ birth_year=1990)
+ req.cnx.commit()
+ rset = req.execute('Any A, X WHERE X age A, X name "Tata"')
+ self.assertEqual(rset[0][0], 2014 - 1990)
+
+
+if __name__ == '__main__':
+ from logilab.common.testlib import unittest_main
+ unittest_main()
--- a/migration.py Tue Oct 07 10:06:24 2014 +0200
+++ b/migration.py Fri Oct 17 18:16:58 2014 +0200
@@ -247,12 +247,13 @@
local_ctx = self._create_context()
try:
import readline
- from rlcompleter import Completer
+ from cubicweb.toolsutils import CWShellCompleter
except ImportError:
# readline not available
pass
else:
- readline.set_completer(Completer(local_ctx).complete)
+ rql_completer = CWShellCompleter(local_ctx)
+ readline.set_completer(rql_completer.complete)
readline.parse_and_bind('tab: complete')
home_key = 'HOME'
if sys.platform == 'win32':
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.20.0_Any.py Fri Oct 17 18:16:58 2014 +0200
@@ -0,0 +1,2 @@
+add_relation_type('CWComputedRType')
+add_attribute('CWAttribute', 'formula')
--- a/req.py Tue Oct 07 10:06:24 2014 +0200
+++ b/req.py Fri Oct 17 18:16:58 2014 +0200
@@ -485,12 +485,16 @@
raise ValueError(self._('can\'t parse %(value)r (expected %(format)s)')
% {'value': value, 'format': format})
+ def _base_url(self, secure=None):
+ if secure:
+ return self.vreg.config.get('https-url') or self.vreg.config['base-url']
+ return self.vreg.config['base-url']
+
def base_url(self, secure=None):
"""return the root url of the instance
"""
- if secure:
- return self.vreg.config.get('https-url') or self.vreg.config['base-url']
- return self.vreg.config['base-url']
+ url = self._base_url(secure=secure)
+ return url if url is None else url.rstrip('/') + '/'
# abstract methods to override according to the web front-end #############
--- a/rqlrewrite.py Tue Oct 07 10:06:24 2014 +0200
+++ b/rqlrewrite.py Fri Oct 17 18:16:58 2014 +0200
@@ -31,7 +31,7 @@
from logilab.common.graph import has_path
from cubicweb import Unauthorized
-
+from cubicweb.schema import RRQLExpression
def cleanup_solutions(rqlst, solutions):
for sol in solutions:
@@ -208,11 +208,21 @@
because it create an unresolvable query (eg no solutions found)
"""
+class VariableFromSubQuery(Exception):
+ """flow control exception to indicate that a variable is coming from a
+ subquery, letting the parent act accordingly
+ """
+ def __init__(self, variable):
+ self.variable = variable
+
class RQLRewriter(object):
- """insert some rql snippets into another rql syntax tree
+ """Insert some rql snippets into another rql syntax tree, for security /
+ relation vocabulary. This implies that it should only restrict results of
+ the original query, not generate new ones. Hence, inserted snippets are
+ inserted under an EXISTS node.
- this class *isn't thread safe*
+ This class *isn't thread safe*.
"""
def __init__(self, session):
@@ -338,7 +348,7 @@
def rewrite(self, select, snippets, kwargs, existingvars=None):
"""
snippets: (varmap, list of rql expression)
- with varmap a *tuple* (select var, snippet var)
+ with varmap a *dict* {select var: snippet var}
"""
self.select = select
# remove_solutions used below require a copy
@@ -350,7 +360,7 @@
self.pending_keys = []
self.existingvars = existingvars
# we have to annotate the rqlst before inserting snippets, even though
- # we'll have to redo it latter
+ # we'll have to redo it later
self.annotate(select)
self.insert_snippets(snippets)
if not self.exists_snippet and self.u_varname:
@@ -362,7 +372,7 @@
assert len(newsolutions) >= len(solutions), (
'rewritten rql %s has lost some solutions, there is probably '
'something wrong in your schema permission (for instance using a '
- 'RQLExpression which insert a relation which doesn\'t exists in '
+ 'RQLExpression which inserts a relation which doesn\'t exist in '
'the schema)\nOrig solutions: %s\nnew solutions: %s' % (
select, solutions, newsolutions))
if len(newsolutions) > len(solutions):
@@ -382,11 +392,10 @@
continue
self.insert_varmap_snippets(varmap, rqlexprs, varexistsmap)
- def insert_varmap_snippets(self, varmap, rqlexprs, varexistsmap):
+ def init_from_varmap(self, varmap, varexistsmap=None):
self.varmap = varmap
self.revvarmap = {}
self.varinfos = []
- self._insert_scope = None
for i, (selectvar, snippetvar) in enumerate(varmap):
assert snippetvar in 'SOX'
self.revvarmap[snippetvar] = (selectvar, i)
@@ -399,25 +408,35 @@
try:
vi['stinfo'] = sti = self.select.defined_vars[selectvar].stinfo
except KeyError:
- # variable may have been moved to a newly inserted subquery
- # we should insert snippet in that subquery
- subquery = self.select.aliases[selectvar].query
- assert len(subquery.children) == 1
- subselect = subquery.children[0]
- RQLRewriter(self.session).rewrite(subselect, [(varmap, rqlexprs)],
- self.kwargs)
- return
+ vi['stinfo'] = sti = self._subquery_variable(selectvar)
if varexistsmap is None:
# build an index for quick access to relations
vi['rhs_rels'] = {}
- for rel in sti['rhsrelations']:
+ for rel in sti.get('rhsrelations', []):
vi['rhs_rels'].setdefault(rel.r_type, []).append(rel)
vi['lhs_rels'] = {}
- for rel in sti['relations']:
- if not rel in sti['rhsrelations']:
+ for rel in sti.get('relations', []):
+ if not rel in sti.get('rhsrelations', []):
vi['lhs_rels'].setdefault(rel.r_type, []).append(rel)
else:
vi['rhs_rels'] = vi['lhs_rels'] = {}
+
+ def _subquery_variable(self, selectvar):
+ raise VariableFromSubQuery(selectvar)
+
+ def insert_varmap_snippets(self, varmap, rqlexprs, varexistsmap):
+ try:
+ self.init_from_varmap(varmap, varexistsmap)
+ except VariableFromSubQuery, ex:
+ # variable may have been moved to a newly inserted subquery
+ # we should insert snippet in that subquery
+ subquery = self.select.aliases[ex.variable].query
+ assert len(subquery.children) == 1, subquery
+ subselect = subquery.children[0]
+ RQLRewriter(self.session).rewrite(subselect, [(varmap, rqlexprs)],
+ self.kwargs)
+ return
+ self._insert_scope = None
previous = None
inserted = False
for rqlexpr in rqlexprs:
@@ -450,6 +469,11 @@
finally:
self.existingvars = existing
+ def _inserted_root(self, new):
+ if not isinstance(new, (n.Exists, n.Not)):
+ new = n.Exists(new)
+ return new
+
def _insert_snippet(self, varmap, previous, new):
"""insert `new` snippet into the syntax tree, which have been rewritten
using `varmap`. In cases where an action is protected by several rql
@@ -474,8 +498,7 @@
self.insert_pending()
#self._insert_scope = None
return new
- if not isinstance(new, (n.Exists, n.Not)):
- new = n.Exists(new)
+ new = self._inserted_root(new)
if previous is None:
insert_scope.add_restriction(new)
else:
@@ -869,3 +892,40 @@
if self._insert_scope is None:
return self.select
return self._insert_scope.stmt
+
+
+class RQLRelationRewriter(RQLRewriter):
+ """Insert some rql snippets into another rql syntax tree, replacing computed
+ relations by their associated rule.
+
+ This class *isn't thread safe*.
+ """
+ def __init__(self, session):
+ super(RQLRelationRewriter, self).__init__(session)
+ self.rules = {}
+ for rschema in self.schema.iter_computed_relations():
+ self.rules[rschema.type] = RRQLExpression(rschema.rule)
+
+ def rewrite(self, union, kwargs=None):
+ self.kwargs = kwargs
+ self.removing_ambiguity = False
+ self.existingvars = None
+ self.pending_keys = None
+ for relation in union.iget_nodes(n.Relation):
+ if relation.r_type in self.rules:
+ self.select = relation.stmt
+ self.solutions = solutions = self.select.solutions[:]
+ self.current_expr = self.rules[relation.r_type]
+ self._insert_scope = relation.scope
+ self.rewritten = {}
+ lhs, rhs = relation.get_variable_parts()
+ varmap = {lhs.name: 'S', rhs.name: 'O'}
+ self.init_from_varmap(tuple(sorted(varmap.items())))
+ self.insert_snippet(varmap, self.current_expr.snippet_rqlst)
+ self.select.remove_node(relation)
+
+ def _subquery_variable(self, selectvar):
+ return self.select.aliases[selectvar].stinfo
+
+ def _inserted_root(self, new):
+ return new
--- a/schema.py Tue Oct 07 10:06:24 2014 +0200
+++ b/schema.py Fri Oct 17 18:16:58 2014 +0200
@@ -37,9 +37,11 @@
RelationDefinitionSchema, PermissionMixIn, role_name
from yams.constraints import BaseConstraint, FormatConstraint
from yams.reader import (CONSTRAINTS, PyFileReader, SchemaLoader,
- obsolete as yobsolete, cleanup_sys_modules)
+ obsolete as yobsolete, cleanup_sys_modules,
+ fill_schema_from_namespace)
from rql import parse, nodes, RQLSyntaxError, TypeResolverException
+from rql.analyze import ETypeResolver
import cubicweb
from cubicweb import ETYPE_NAME_MAP, ValidationError, Unauthorized
@@ -81,7 +83,7 @@
# set of entity and relation types used to build the schema
SCHEMA_TYPES = set((
- 'CWEType', 'CWRType', 'CWAttribute', 'CWRelation',
+ 'CWEType', 'CWRType', 'CWComputedRType', 'CWAttribute', 'CWRelation',
'CWConstraint', 'CWConstraintType', 'CWUniqueTogetherConstraint',
'RQLExpression',
'specializes',
@@ -106,6 +108,11 @@
ybo.ETYPE_PROPERTIES += ('eid',)
ybo.RTYPE_PROPERTIES += ('eid',)
+def build_schema_from_namespace(items):
+ schema = CubicWebSchema('noname')
+ fill_schema_from_namespace(schema, items, register_base_types=False)
+ return schema
+
# Bases for manipulating RQL in schema #########################################
def guess_rrqlexpr_mainvars(expression):
@@ -118,7 +125,8 @@
if 'U' in defined:
mainvars.add('U')
if not mainvars:
- raise Exception('unable to guess selection variables')
+ raise BadSchemaDefinition('unable to guess selection variables in %r'
+ % expression)
return mainvars
def split_expression(rqlstring):
@@ -136,6 +144,44 @@
return u', '.join(' '.join(expr.split()) for expr in rqlstring.split(','))
+def _check_valid_formula(rdef, formula_rqlst):
+ """Check the formula is a valid RQL query with some restriction (no union,
+ single selected node, etc.), raise BadSchemaDefinition if not
+ """
+ if len(formula_rqlst.children) != 1:
+ raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: '
+ 'can not use UNION in formula %(form)r' %
+ {'attr' : rdef.rtype,
+ 'etype' : rdef.subject.type,
+ 'form' : rdef.formula})
+ select = formula_rqlst.children[0]
+ if len(select.selection) != 1:
+ raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: '
+ 'can only select one term in formula %(form)r' %
+ {'attr' : rdef.rtype,
+ 'etype' : rdef.subject.type,
+ 'form' : rdef.formula})
+ term = select.selection[0]
+ types = set(term.get_type(sol) for sol in select.solutions)
+ if len(types) != 1:
+ raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: '
+ 'multiple possible types (%(types)s) for formula %(form)r' %
+ {'attr' : rdef.rtype,
+ 'etype' : rdef.subject.type,
+ 'types' : list(types),
+ 'form' : rdef.formula})
+ computed_type = types.pop()
+ expected_type = rdef.object.type
+ if computed_type != expected_type:
+ raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: '
+ 'computed attribute type (%(comp_type)s) mismatch with '
+ 'specified type (%(attr_type)s)' %
+ {'attr' : rdef.rtype,
+ 'etype' : rdef.subject.type,
+ 'comp_type' : computed_type,
+ 'attr_type' : expected_type})
+
+
class RQLExpression(object):
"""Base class for RQL expression used in schema (constraints and
permissions)
@@ -146,6 +192,7 @@
# to be defined in concrete classes
rqlst = None
predefined_variables = None
+ full_rql = None
def __init__(self, expression, mainvars, eid):
"""
@@ -1001,6 +1048,59 @@
def schema_by_eid(self, eid):
return self._eid_index[eid]
+ def iter_computed_attributes(self):
+ for relation in self.relations():
+ for rdef in relation.rdefs.itervalues():
+ if rdef.final and rdef.formula is not None:
+ yield rdef
+
+ def iter_computed_relations(self):
+ for relation in self.relations():
+ if relation.rule:
+ yield relation
+
+ def finalize(self):
+ super(CubicWebSchema, self).finalize()
+ self.finalize_computed_attributes()
+ self.finalize_computed_relations()
+
+ def finalize_computed_attributes(self):
+ """Check computed attributes validity (if any), raise
+ `BadSchemaDefinition` if one is invalid
+ """
+ analyzer = ETypeResolver(self)
+ for rdef in self.iter_computed_attributes():
+ rqlst = parse(rdef.formula)
+ select = rqlst.children[0]
+ analyzer.visit(select)
+ _check_valid_formula(rdef, rqlst)
+ rdef.formula_select = select # avoid later recomputation
+
+
+ def finalize_computed_relations(self):
+ """Build relation definitions for computed relations
+
+ The subject and object types are inferred using the rql analyzer.
+ """
+ analyzer = ETypeResolver(self)
+ for rschema in self.iter_computed_relations():
+ # XXX rule is valid if both S and O are defined and not in an exists
+ rqlexpr = RRQLExpression(rschema.rule)
+ rqlst = rqlexpr.snippet_rqlst
+ analyzer.visit(rqlst)
+ couples = set((sol['S'], sol['O']) for sol in rqlst.solutions)
+ for subjtype, objtype in couples:
+ if self[objtype].final:
+ raise BadSchemaDefinition('computed relations cannot be final')
+ rdef = ybo.RelationDefinition(
+ subjtype, rschema.type, objtype)
+ rdef.infered = True
+ self.add_relation_def(rdef)
+
+ def rebuild_infered_relations(self):
+ super(CubicWebSchema, self).rebuild_infered_relations()
+ self.finalize_computed_relations()
+
# additional cw specific constraints ###########################################
@@ -1263,6 +1363,7 @@
# only defining here to prevent pylint from complaining
info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
+
set_log_methods(CubicWebSchemaLoader, getLogger('cubicweb.schemaloader'))
set_log_methods(BootstrapSchemaLoader, getLogger('cubicweb.bootstrapschemaloader'))
set_log_methods(RQLExpression, getLogger('cubicweb.schema'))
--- a/schemas/bootstrap.py Tue Oct 07 10:06:24 2014 +0200
+++ b/schemas/bootstrap.py Fri Oct 17 18:16:58 2014 +0200
@@ -57,6 +57,16 @@
final = Boolean(description=_('automatic'))
+class CWComputedRType(EntityType):
+ """define a virtual relation type, used to build the instance schema"""
+ __permissions__ = PUB_SYSTEM_ENTITY_PERMS
+ name = String(required=True, indexed=True, internationalizable=True,
+ unique=True, maxsize=64)
+ description = RichString(internationalizable=True,
+ description=_('semantic description of this relation type'))
+ rule = String(required=True)
+
+
class CWAttribute(EntityType):
"""define a final relation: link a final relation type from a non final
entity to a final entity type.
@@ -80,6 +90,7 @@
description=_('subject/object cardinality'))
ordernum = Int(description=('control subject entity\'s relations order'), default=0)
+ formula = String(maxsize=2048)
indexed = Boolean(description=_('create an index for quick search on this attribute'))
fulltextindexed = Boolean(description=_('index this attribute\'s value in the plain text index'))
internationalizable = Boolean(description=_('is this attribute\'s value translatable'))
--- a/server/migractions.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/migractions.py Fri Oct 17 18:16:58 2014 +0200
@@ -579,6 +579,9 @@
"""
subjtype, objtype = str(subjtype), str(objtype)
rschema = self.fs_schema.rschema(rtype)
+ if rschema.rule:
+ raise ExecutionError('Cannot synchronize a relation definition for a '
+ 'computed relation (%s)' % rschema)
reporschema = self.repo.schema.rschema(rschema)
if (subjtype, rschema, objtype) in self._synchronized:
return
@@ -1018,11 +1021,13 @@
if rtype in reposchema:
print 'warning: relation type %s is already known, skip addition' % (
rtype)
+ elif rschema.rule:
+ ss.execschemarql(execute, rschema, ss.crschema2rql(rschema))
else:
# register the relation into CWRType and insert necessary relation
# definitions
ss.execschemarql(execute, rschema, ss.rschema2rql(rschema, addrdef=False))
- if addrdef:
+ if not rschema.rule and addrdef:
self.commit()
gmap = self.group_mapping()
cmap = self.cstrtype_mapping()
@@ -1057,8 +1062,12 @@
def cmd_drop_relation_type(self, rtype, commit=True):
"""unregister an existing relation type"""
- # unregister the relation from CWRType
- self.rqlexec('DELETE CWRType X WHERE X name %r' % rtype,
+ rschema = self.repo.schema[rtype]
+ if rschema.rule:
+ etype = 'CWComputedRType'
+ else:
+ etype = 'CWRType'
+ self.rqlexec('DELETE %s X WHERE X name %r' % (etype, rtype),
ask_confirm=self.verbosity>=2)
if commit:
self.commit()
@@ -1086,6 +1095,9 @@
schema definition file
"""
rschema = self.fs_schema.rschema(rtype)
+ if rschema.rule:
+ raise ExecutionError('Cannot add a relation definition for a '
+ 'computed relation (%s)' % rschema)
if not rtype in self.repo.schema:
self.cmd_add_relation_type(rtype, addrdef=False, commit=True)
if (subjtype, objtype) in self.repo.schema.rschema(rtype).rdefs:
@@ -1113,6 +1125,9 @@
def cmd_drop_relation_definition(self, subjtype, rtype, objtype, commit=True):
"""unregister an existing relation definition"""
rschema = self.repo.schema.rschema(rtype)
+ if rschema.rule:
+ raise ExecutionError('Cannot drop a relation definition for a '
+ 'computed relation (%s)' % rschema)
# unregister the definition from CWAttribute or CWRelation
if rschema.final:
etype = 'CWAttribute'
--- a/server/querier.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/querier.py Fri Oct 17 18:16:58 2014 +0200
@@ -28,6 +28,7 @@
from yams import BASE_TYPES
from cubicweb import ValidationError, Unauthorized, UnknownEid
+from cubicweb.rqlrewrite import RQLRelationRewriter
from cubicweb import Binary, server
from cubicweb.rset import ResultSet
@@ -72,7 +73,44 @@
except AttributeError:
return cnx.entity_metas(term.eval(args))['type']
-def check_read_access(cnx, rqlst, solution, args):
+def check_relations_read_access(cnx, select, args):
+ """Raise :exc:`Unauthorized` if the given user doesn't have credentials to
+ read relations used in the givel syntaxt tree
+ """
+ # use `term_etype` since we've to deal with rewritten constants here,
+ # when used as an external source by another repository.
+ # XXX what about local read security w/ those rewritten constants...
+ # XXX constants can also happen in some queries generated by req.find()
+ DBG = (server.DEBUG & server.DBG_SEC) and 'read' in server._SECURITY_CAPS
+ schema = cnx.repo.schema
+ user = cnx.user
+ if select.where is not None:
+ for rel in select.where.iget_nodes(Relation):
+ for solution in select.solutions:
+ # XXX has_text may have specific perm ?
+ if rel.r_type in READ_ONLY_RTYPES:
+ continue
+ rschema = schema.rschema(rel.r_type)
+ if rschema.final:
+ eschema = schema.eschema(term_etype(cnx, rel.children[0],
+ solution, args))
+ rdef = eschema.rdef(rschema)
+ else:
+ rdef = rschema.rdef(term_etype(cnx, rel.children[0],
+ solution, args),
+ term_etype(cnx, rel.children[1].children[0],
+ solution, args))
+ if not user.matching_groups(rdef.get_groups('read')):
+ if DBG:
+ print ('check_read_access: %s %s does not match %s' %
+ (rdef, user.groups, rdef.get_groups('read')))
+ # XXX rqlexpr not allowed
+ raise Unauthorized('read', rel.r_type)
+ if DBG:
+ print ('check_read_access: %s %s matches %s' %
+ (rdef, user.groups, rdef.get_groups('read')))
+
+def get_local_checks(cnx, rqlst, solution):
"""Check that the given user has credentials to access data read by the
query and return a dict defining necessary "local checks" (i.e. rql
expression in read permission defined in the schema) where no group grants
@@ -80,50 +118,27 @@
Returned dictionary's keys are variable names and values the rql expressions
for this variable (with the given solution).
+
+ Raise :exc:`Unauthorized` if access is known to be denied, i.e. if there is
+ no matching group and no local permissions.
"""
- # use `term_etype` since we've to deal with rewritten constants here,
- # when used as an external source by another repository.
- # XXX what about local read security w/ those rewritten constants...
DBG = (server.DEBUG & server.DBG_SEC) and 'read' in server._SECURITY_CAPS
schema = cnx.repo.schema
- if rqlst.where is not None:
- for rel in rqlst.where.iget_nodes(Relation):
- # XXX has_text may have specific perm ?
- if rel.r_type in READ_ONLY_RTYPES:
- continue
- rschema = schema.rschema(rel.r_type)
- if rschema.final:
- eschema = schema.eschema(term_etype(cnx, rel.children[0],
- solution, args))
- rdef = eschema.rdef(rschema)
- else:
- rdef = rschema.rdef(term_etype(cnx, rel.children[0],
- solution, args),
- term_etype(cnx, rel.children[1].children[0],
- solution, args))
- if not cnx.user.matching_groups(rdef.get_groups('read')):
- if DBG:
- print ('check_read_access: %s %s does not match %s' %
- (rdef, cnx.user.groups, rdef.get_groups('read')))
- # XXX rqlexpr not allowed
- raise Unauthorized('read', rel.r_type)
- if DBG:
- print ('check_read_access: %s %s matches %s' %
- (rdef, cnx.user.groups, rdef.get_groups('read')))
+ user = cnx.user
localchecks = {}
# iterate on defined_vars and not on solutions to ignore column aliases
for varname in rqlst.defined_vars:
eschema = schema.eschema(solution[varname])
if eschema.final:
continue
- if not cnx.user.matching_groups(eschema.get_groups('read')):
+ if not user.matching_groups(eschema.get_groups('read')):
erqlexprs = eschema.get_rqlexprs('read')
if not erqlexprs:
ex = Unauthorized('read', solution[varname])
ex.var = varname
if DBG:
print ('check_read_access: %s %s %s %s' %
- (varname, eschema, cnx.user.groups, eschema.get_groups('read')))
+ (varname, eschema, user.groups, eschema.get_groups('read')))
raise ex
# don't insert security on variable only referenced by 'NOT X relation Y' or
# 'NOT EXISTS(X relation Y)'
@@ -133,7 +148,8 @@
if (not schema.rschema(r.r_type).final
and ((isinstance(r.parent, Exists) and r.parent.neged(strict=True))
or isinstance(r.parent, Not)))])
- != len(varinfo['relations'])):
+ !=
+ len(varinfo['relations'])):
localchecks[varname] = erqlexprs
return localchecks
@@ -258,7 +274,7 @@
newsolutions = []
for solution in rqlst.solutions:
try:
- localcheck = check_read_access(cnx, rqlst, solution, self.args)
+ localcheck = get_local_checks(cnx, rqlst, solution)
except Unauthorized as ex:
msg = 'remove %s from solutions since %s has no %s access to %s'
msg %= (solution, cnx.user.login, ex.args[0], ex.args[1])
@@ -573,10 +589,14 @@
if cnx.read_security:
for select in rqlst.children:
check_no_password_selected(select)
+ check_relations_read_access(cnx, select, args)
# on select query, always copy the cached rqlst so we don't have to
# bother modifying it. This is not necessary on write queries since
# a new syntax tree is built from them.
rqlst = rqlst.copy()
+ # Rewrite computed relations
+ rewriter = RQLRelationRewriter(cnx)
+ rewriter.rewrite(rqlst, args)
self._annotate(rqlst)
if args:
# different SQL generated when some argument is None or not (IS
--- a/server/schemaserial.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/schemaserial.py Fri Oct 17 18:16:58 2014 +0200
@@ -87,6 +87,27 @@
"""
repo = cnx.repo
dbhelper = repo.system_source.dbhelper
+
+ # Computed Rtype
+ with cnx.ensure_cnx_set:
+ tables = set(dbhelper.list_tables(cnx.cnxset.cu))
+ has_computed_relations = 'cw_CWComputedRType' in tables
+ if has_computed_relations:
+ rset = cnx.execute(
+ 'Any X, N, R, D WHERE X is CWComputedRType, X name N, '
+ 'X rule R, X description D')
+ for eid, rule_name, rule, description in rset.rows:
+ rtype = ybo.ComputedRelation(name=rule_name, rule=rule, eid=eid,
+ description=description)
+ schema.add_relation_type(rtype)
+ # computed attribute
+ try:
+ cnx.system_sql("SELECT cw_formula FROM cw_CWAttribute")
+ has_computed_attributes = True
+ except Exception:
+ cnx.rollback()
+ has_computed_attributes = False
+
# XXX bw compat (3.6 migration)
with cnx.ensure_cnx_set:
sqlcu = cnx.system_sql("SELECT * FROM cw_CWRType WHERE cw_name='symetric'")
@@ -100,6 +121,7 @@
copiedeids = set()
permsidx = deserialize_ertype_permissions(cnx)
schema.reading_from_database = True
+ # load every entity types
for eid, etype, desc in cnx.execute(
'Any X, N, D WHERE X is CWEType, X name N, X description D',
build_descr=False):
@@ -148,6 +170,7 @@
eschema = schema.add_entity_type(
ybo.EntityType(name=etype, description=desc, eid=eid))
set_perms(eschema, permsidx)
+ # load inheritance relations
for etype, stype in cnx.execute(
'Any XN, ETN WHERE X is CWEType, X name XN, X specializes ET, ET name ETN',
build_descr=False):
@@ -155,6 +178,7 @@
stype = ETYPE_NAME_MAP.get(stype, stype)
schema.eschema(etype)._specialized_type = stype
schema.eschema(stype)._specialized_by.append(etype)
+ # load every relation types
for eid, rtype, desc, sym, il, ftc in cnx.execute(
'Any X,N,D,S,I,FTC WHERE X is CWRType, X name N, X description D, '
'X symmetric S, X inlined I, X fulltext_container FTC', build_descr=False):
@@ -163,6 +187,7 @@
ybo.RelationType(name=rtype, description=desc,
symmetric=bool(sym), inlined=bool(il),
fulltext_container=ftc, eid=eid))
+ # remains to load every relation definitions (ie relations and attributes)
cstrsidx = deserialize_rdef_constraints(cnx)
pendingrdefs = []
# closure to factorize common code of attribute/relation rdef addition
@@ -193,29 +218,37 @@
# Get the type parameters for additional base types.
try:
extra_props = dict(cnx.execute('Any X, XTP WHERE X is CWAttribute, '
- 'X extra_props XTP'))
+ 'X extra_props XTP'))
except Exception:
cnx.critical('Previous CRITICAL notification about extra_props is not '
- 'a problem if you are migrating to cubicweb 3.17')
+ 'a problem if you are migrating to cubicweb 3.17')
extra_props = {} # not yet in the schema (introduced by 3.17 migration)
- for values in cnx.execute(
- 'Any X,SE,RT,OE,CARD,ORD,DESC,IDX,FTIDX,I18N,DFLT WHERE X is CWAttribute,'
- 'X relation_type RT, X cardinality CARD, X ordernum ORD, X indexed IDX,'
- 'X description DESC, X internationalizable I18N, X defaultval DFLT,'
- 'X fulltextindexed FTIDX, X from_entity SE, X to_entity OE',
- build_descr=False):
- rdefeid, seid, reid, oeid, card, ord, desc, idx, ftidx, i18n, default = values
- typeparams = extra_props.get(rdefeid)
- typeparams = json.load(typeparams) if typeparams else {}
+
+ # load attributes
+ rql = ('Any X,SE,RT,OE,CARD,ORD,DESC,IDX,FTIDX,I18N,DFLT%(fm)s '
+ 'WHERE X is CWAttribute, X relation_type RT, X cardinality CARD,'
+ ' X ordernum ORD, X indexed IDX, X description DESC, '
+ ' X internationalizable I18N, X defaultval DFLT,%(fmsnip)s'
+ ' X fulltextindexed FTIDX, X from_entity SE, X to_entity OE')
+ if has_computed_attributes:
+ rql = rql % {'fm': ',FM', 'fmsnip': 'X formula FM,'}
+ else:
+ rql = rql % {'fm': '', 'fmsnip': ''}
+ for values in cnx.execute(rql, build_descr=False):
+ attrs = dict(zip(
+ ('rdefeid', 'seid', 'reid', 'oeid', 'cardinality',
+ 'order', 'description', 'indexed', 'fulltextindexed',
+ 'internationalizable', 'default', 'formula'), values))
+ typeparams = extra_props.get(attrs['rdefeid'])
+ attrs.update(json.load(typeparams) if typeparams else {})
+ default = attrs['default']
if default is not None:
if isinstance(default, Binary):
# while migrating from 3.17 to 3.18, we still have to
# handle String defaults
- default = default.unzpickle()
- _add_rdef(rdefeid, seid, reid, oeid,
- cardinality=card, description=desc, order=ord,
- indexed=idx, fulltextindexed=ftidx, internationalizable=i18n,
- default=default, **typeparams)
+ attrs['default'] = default.unzpickle()
+ _add_rdef(**attrs)
+ # load relations
for values in cnx.execute(
'Any X,SE,RT,OE,CARD,ORD,DESC,C WHERE X is CWRelation, X relation_type RT,'
'X cardinality CARD, X ordernum ORD, X description DESC, '
@@ -252,6 +285,7 @@
eschema._unique_together.append(tuple(sorted(unique_together)))
schema.infer_specialization_rules()
cnx.commit()
+ schema.finalize()
schema.reading_from_database = False
@@ -309,19 +343,14 @@
"""synchronize schema and permissions in the database according to
current schema
"""
- quiet = os.environ.get('APYCOT_ROOT')
- if not quiet:
- _title = '-> storing the schema in the database '
- print _title,
+ _title = '-> storing the schema in the database '
+ print _title,
execute = cnx.execute
eschemas = schema.entities()
- if not quiet:
- pb_size = (len(eschemas + schema.relations())
- + len(CONSTRAINTS)
- + len([x for x in eschemas if x.specializes()]))
- pb = ProgressBar(pb_size, title=_title)
- else:
- pb = None
+ pb_size = (len(eschemas + schema.relations())
+ + len(CONSTRAINTS)
+ + len([x for x in eschemas if x.specializes()]))
+ pb = ProgressBar(pb_size, title=_title)
groupmap = group_mapping(cnx, interactive=False)
# serialize all entity types, assuring CWEType is serialized first for proper
# is / is_instance_of insertion
@@ -346,6 +375,11 @@
if pb is not None:
pb.update()
continue
+ if rschema.rule:
+ execschemarql(execute, rschema, crschema2rql(rschema))
+ if pb is not None:
+ pb.update()
+ continue
execschemarql(execute, rschema, rschema2rql(rschema, addrdef=False))
if rschema.symmetric:
rdefs = [rdef for k, rdef in rschema.rdefs.iteritems()
@@ -366,8 +400,7 @@
execute(rql, kwargs, build_descr=False)
if pb is not None:
pb.update()
- if not quiet:
- print
+ print
# high level serialization functions
@@ -462,7 +495,7 @@
# rtype serialization
def rschema2rql(rschema, cstrtypemap=None, addrdef=True, groupmap=None):
- """return a list of rql insert statements to enter a relation schema
+ """generate rql insert statements to enter a relation schema
in the database as an CWRType entity
"""
if rschema.type == 'has_text':
@@ -489,10 +522,22 @@
relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
return relations, values
+def crschema2rql(crschema):
+ relations, values = crschema_relations_values(crschema)
+ yield 'INSERT CWComputedRType X: %s' % ','.join(relations), values
+
+def crschema_relations_values(crschema):
+ values = _ervalues(crschema)
+ values['rule'] = unicode(crschema.rule)
+ # XXX why oh why?
+ del values['final']
+ relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
+ return relations, values
+
# rdef serialization
def rdef2rql(rdef, cstrtypemap, groupmap=None):
- # don't serialize infered relations
+ # don't serialize inferred relations
if rdef.infered:
return
relations, values = _rdef_values(rdef)
@@ -585,9 +630,13 @@
yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values
def updaterschema2rql(rschema, eid):
- relations, values = rschema_relations_values(rschema)
- values['x'] = eid
- yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values
+ if rschema.rule:
+ yield ('SET X rule %(r)s WHERE X eid %(x)s',
+ {'x': eid, 'r': unicode(rschema.rule)})
+ else:
+ relations, values = rschema_relations_values(rschema)
+ values['x'] = eid
+ yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values
def updaterdef2rql(rdef, eid):
relations, values = _rdef_values(rdef)
--- a/server/sources/__init__.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/sources/__init__.py Fri Oct 17 18:16:58 2014 +0200
@@ -105,7 +105,7 @@
self.support_relations['identity'] = False
self.eid = eid
self.public_config = source_config.copy()
- self.public_config.setdefault('use-cwuri-as-url', self.use_cwuri_as_url)
+ self.public_config['use-cwuri-as-url'] = self.use_cwuri_as_url
self.remove_sensitive_information(self.public_config)
self.uri = source_config.pop('uri')
set_log_methods(self, getLogger('cubicweb.sources.'+self.uri))
--- a/server/sources/datafeed.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/sources/datafeed.py Fri Oct 17 18:16:58 2014 +0200
@@ -83,6 +83,13 @@
'help': ('Timeout of HTTP GET requests, when synchronizing a source.'),
'group': 'datafeed-source', 'level': 2,
}),
+ ('use-cwuri-as-url',
+ {'type': 'yn',
+ 'default': None, # explicitly unset
+ 'help': ('Use cwuri (i.e. external URL) for link to the entity '
+ 'instead of its local URL.'),
+ 'group': 'datafeed-source', 'level': 1,
+ }),
)
def check_config(self, source_entity):
@@ -107,6 +114,12 @@
self.synchro_interval = timedelta(seconds=typed_config['synchronization-interval'])
self.max_lock_lifetime = timedelta(seconds=typed_config['max-lock-lifetime'])
self.http_timeout = typed_config['http-timeout']
+ # if typed_config['use-cwuri-as-url'] is set, we have to update
+ # use_cwuri_as_url attribute and public configuration dictionary
+ # accordingly
+ if typed_config['use-cwuri-as-url'] is not None:
+ self.use_cwuri_as_url = typed_config['use-cwuri-as-url']
+ self.public_config['use-cwuri-as-url'] = self.use_cwuri_as_url
def init(self, activated, source_entity):
super(DataFeedSource, self).init(activated, source_entity)
@@ -285,12 +298,39 @@
self.stats = {'created': set(), 'updated': set(), 'checked': set()}
def normalize_url(self, url):
- from cubicweb.sobjects import URL_MAPPING # available after registration
+ """Normalize an url by looking if there is a replacement for it in
+ `cubicweb.sobjects.URL_MAPPING`.
+
+ This dictionary allows redirecting from one host to another, which may be
+ useful for example in the case of a test instance using production data, while
+ you don't want to load the external source nor to hack your `/etc/hosts`
+ file.
+ """
+ # local import mandatory, it's available after registration
+ from cubicweb.sobjects import URL_MAPPING
for mappedurl in URL_MAPPING:
if url.startswith(mappedurl):
return url.replace(mappedurl, URL_MAPPING[mappedurl], 1)
return url
+ def retrieve_url(self, url, data=None, headers=None):
+ """Return stream linked by the given url:
+ * HTTP urls will be normalized (see :meth:`normalize_url`)
+ * handle file:// URL
+ * other will be considered as plain content, useful for testing purpose
+ """
+ if url.startswith('http'):
+ url = self.normalize_url(url)
+ if data:
+ self.source.info('POST %s %s', url, data)
+ else:
+ self.source.info('GET %s', url)
+ req = urllib2.Request(url, data, headers)
+ return _OPENER.open(req, timeout=self.source.http_timeout)
+ if url.startswith('file://'):
+ return URLLibResponseAdapter(open(url[7:]), url)
+ return URLLibResponseAdapter(StringIO.StringIO(url), url)
+
def add_schema_config(self, schemacfg, checkonly=False):
"""added CWSourceSchemaConfig, modify mapping accordingly"""
msg = schemacfg._cw._("this parser doesn't use a mapping")
@@ -427,14 +467,7 @@
return error
def parse(self, url):
- if url.startswith('http'):
- url = self.normalize_url(url)
- self.source.info('GET %s', url)
- stream = _OPENER.open(url, timeout=self.source.http_timeout)
- elif url.startswith('file://'):
- stream = open(url[7:])
- else:
- stream = StringIO.StringIO(url)
+ stream = self.retrieve_url(url)
return self.parse_etree(etree.parse(stream).getroot())
def parse_etree(self, document):
@@ -455,6 +488,27 @@
return exists(extid[7:])
return False
+
+class URLLibResponseAdapter(object):
+ """Thin wrapper to be used to fake a value returned by urllib2.urlopen"""
+ def __init__(self, stream, url, code=200):
+ self._stream = stream
+ self._url = url
+ self.code = code
+
+ def read(self, *args):
+ return self._stream.read(*args)
+
+ def geturl(self):
+ return self._url
+
+ def getcode(self):
+ return self.code
+
+ def info(self):
+ from mimetools import Message
+ return Message(StringIO.StringIO())
+
# use a cookie enabled opener to use session cookie if any
_OPENER = urllib2.build_opener()
try:
--- a/server/sources/native.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/sources/native.py Fri Oct 17 18:16:58 2014 +0200
@@ -318,10 +318,16 @@
'want trusted authentication for the database connection',
'group': 'native-source', 'level': 2,
}),
+ ('db-statement-timeout',
+ {'type': 'int',
+ 'default': 0,
+ 'help': 'sql statement timeout, in milliseconds (postgres only)',
+ 'group': 'native-source', 'level': 2,
+ }),
)
def __init__(self, repo, source_config, *args, **kwargs):
- SQLAdapterMixIn.__init__(self, source_config)
+ SQLAdapterMixIn.__init__(self, source_config, repairing=repo.config.repairing)
self.authentifiers = [LoginPasswordAuthentifier(self)]
if repo.config['allow-email-login']:
self.authentifiers.insert(0, EmailPasswordAuthentifier(self))
--- a/server/sqlutils.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/sqlutils.py Fri Oct 17 18:16:58 2014 +0200
@@ -47,7 +47,7 @@
return subprocess.call(cmd)
-def sqlexec(sqlstmts, cursor_or_execute, withpb=not os.environ.get('APYCOT_ROOT'),
+def sqlexec(sqlstmts, cursor_or_execute, withpb=True,
pbtitle='', delimiter=';', cnx=None):
"""execute sql statements ignoring DROP/ CREATE GROUP or USER statements
error.
@@ -299,7 +299,7 @@
"""
cnx_wrap = ConnectionWrapper
- def __init__(self, source_config):
+ def __init__(self, source_config, repairing=False):
try:
self.dbdriver = source_config['db-driver'].lower()
dbname = source_config['db-name']
@@ -328,6 +328,14 @@
if self.dbdriver == 'sqlite':
self.cnx_wrap = SqliteConnectionWrapper
self.dbhelper.dbname = abspath(self.dbhelper.dbname)
+ if not repairing:
+ statement_timeout = int(source_config.get('db-statement-timeout', 0))
+ if statement_timeout > 0:
+ def set_postgres_timeout(cnx):
+ cnx.cursor().execute('SET statement_timeout to %d' % statement_timeout)
+ cnx.commit()
+ postgres_hooks = SQL_CONNECT_HOOKS['postgres']
+ postgres_hooks.append(set_postgres_timeout)
def wrapped_connection(self):
"""open and return a connection to the database, wrapped into a class
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data-cwep002/schema.py Fri Oct 17 18:16:58 2014 +0200
@@ -0,0 +1,35 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+from yams.buildobjs import EntityType, RelationDefinition, Int, ComputedRelation
+
+class Person(EntityType):
+ salary = Int()
+
+class works_for(RelationDefinition):
+ subject = 'Person'
+ object = 'Company'
+ cardinality = '?*'
+
+class Company(EntityType):
+ total_salary = Int(formula='Any SUM(SA) GROUPBY X WHERE '
+ 'P works_for X, P salary SA')
+
+class has_employee(ComputedRelation):
+ rule = 'O works_for S'
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/datacomputed/migratedapp/schema.py Fri Oct 17 18:16:58 2014 +0200
@@ -0,0 +1,57 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+from yams.buildobjs import (EntityType, RelationDefinition, ComputedRelation,
+ Int, Float)
+
+
+class Employee(EntityType):
+ pass
+
+
+class employees(RelationDefinition):
+ subject = 'Company'
+ object = 'Employee'
+
+
+class associates(RelationDefinition):
+ subject = 'Company'
+ object = 'Employee'
+
+
+class works_for(ComputedRelation):
+ rule = 'O employees S, NOT EXISTS (O associates S)'
+
+
+class Company(EntityType):
+ score = Float(formula='Any AVG(NN) WHERE X employees E, N concerns E, N note NN')
+ score100 = Float(formula='Any AVG(NN) WHERE X employees E, N concerns E, N note100 NN')
+
+
+class Note(EntityType):
+ note = Int()
+ note100 = Int(formula='Any N*100 WHERE X note N')
+
+
+class concerns(RelationDefinition):
+ subject = 'Note'
+ object = 'Employee'
+
+
+class whatever(ComputedRelation):
+ rule = 'S employees E, O associates E'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/datacomputed/schema.py Fri Oct 17 18:16:58 2014 +0200
@@ -0,0 +1,54 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+from yams.buildobjs import EntityType, RelationDefinition, ComputedRelation, Int, Float
+
+
+class Employee(EntityType):
+ pass
+
+
+class employees(RelationDefinition):
+ subject = 'Company'
+ object = 'Employee'
+
+
+class associates(RelationDefinition):
+ subject = 'Company'
+ object = 'Employee'
+
+
+class Company(EntityType):
+ score100 = Float(formula='Any AVG(NN) WHERE X employees E, N concerns E, N note100 NN')
+
+class Note(EntityType):
+ note = Int()
+ note20 = Int(formula='Any N*20 WHERE X note N')
+ note100 = Int(formula='Any N*20 WHERE X note N')
+
+class concerns(RelationDefinition):
+ subject = 'Note'
+ object = 'Employee'
+
+
+class notes(ComputedRelation):
+ rule = 'S employees E, O concerns E'
+
+
+class whatever(ComputedRelation):
+ rule = 'S employees E, O concerns E'
--- a/server/test/unittest_datafeed.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/test/unittest_datafeed.py Fri Oct 17 18:16:58 2014 +0200
@@ -16,7 +16,9 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+import mimetools
from datetime import timedelta
+from contextlib import contextmanager
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.server.sources import datafeed
@@ -25,19 +27,14 @@
class DataFeedTC(CubicWebTC):
def setup_database(self):
with self.admin_access.repo_cnx() as cnx:
- cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
- parser=u'testparser', url=u'ignored',
- config=u'synchronization-interval=1min')
- cnx.commit()
+ with self.base_parser(cnx):
+ cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
+ parser=u'testparser', url=u'ignored',
+ config=u'synchronization-interval=1min')
+ cnx.commit()
- def test(self):
- self.assertIn('myfeed', self.repo.sources_by_uri)
- dfsource = self.repo.sources_by_uri['myfeed']
- self.assertEqual(dfsource.latest_retrieval, None)
- self.assertEqual(dfsource.synchro_interval, timedelta(seconds=60))
- self.assertFalse(dfsource.fresh())
-
-
+ @contextmanager
+ def base_parser(self, session):
class AParser(datafeed.DataFeedParser):
__regid__ = 'testparser'
def process(self, url, raise_on_error=False):
@@ -50,7 +47,24 @@
entity.cw_edited.update(sourceparams['item'])
with self.temporary_appobjects(AParser):
- with self.repo.internal_cnx() as cnx:
+ if 'myfeed' in self.repo.sources_by_uri:
+ yield self.repo.sources_by_uri['myfeed']._get_parser(session)
+ else:
+ yield
+
+ def test(self):
+ self.assertIn('myfeed', self.repo.sources_by_uri)
+ dfsource = self.repo.sources_by_uri['myfeed']
+ self.assertNotIn('use_cwuri_as_url', dfsource.__dict__)
+ self.assertEqual({'type': u'datafeed', 'uri': u'myfeed', 'use-cwuri-as-url': True},
+ dfsource.public_config)
+ self.assertEqual(dfsource.use_cwuri_as_url, True)
+ self.assertEqual(dfsource.latest_retrieval, None)
+ self.assertEqual(dfsource.synchro_interval, timedelta(seconds=60))
+ self.assertFalse(dfsource.fresh())
+
+ with self.repo.internal_cnx() as cnx:
+ with self.base_parser(cnx):
stats = dfsource.pull_data(cnx, force=True)
cnx.commit()
# test import stats
@@ -119,6 +133,28 @@
self.assertFalse(cnx.execute('Card X WHERE X title "cubicweb.org"'))
self.assertFalse(cnx.execute('Any X WHERE X has_text "cubicweb.org"'))
+ def test_parser_retrieve_url_local(self):
+ with self.admin_access.repo_cnx() as cnx:
+ with self.base_parser(cnx) as parser:
+ value = parser.retrieve_url('a string')
+ self.assertEqual(200, value.getcode())
+ self.assertEqual('a string', value.geturl())
+ self.assertIsInstance(value.info(), mimetools.Message)
+
+
+class DataFeedConfigTC(CubicWebTC):
+
+ def test_use_cwuri_as_url_override(self):
+ with self.admin_access.client_cnx() as cnx:
+ cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
+ parser=u'testparser', url=u'ignored',
+ config=u'use-cwuri-as-url=no')
+ cnx.commit()
+ dfsource = self.repo.sources_by_uri['myfeed']
+ self.assertEqual(dfsource.use_cwuri_as_url, False)
+ self.assertEqual({'type': u'datafeed', 'uri': u'myfeed', 'use-cwuri-as-url': False},
+ dfsource.public_config)
+
if __name__ == '__main__':
from logilab.common.testlib import unittest_main
unittest_main()
--- a/server/test/unittest_migractions.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/test/unittest_migractions.py Fri Oct 17 18:16:58 2014 +0200
@@ -18,46 +18,50 @@
"""unit tests for module cubicweb.server.migractions"""
from datetime import date
-from os.path import join
+import os.path as osp
from contextlib import contextmanager
from logilab.common.testlib import unittest_main, Tags, tag
from yams.constraints import UniqueConstraint
-from cubicweb import ConfigurationError, ValidationError
+from cubicweb import ConfigurationError, ValidationError, ExecutionError
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.server.sqlutils import SQL_PREFIX
from cubicweb.server.migractions import ServerMigrationHelper
import cubicweb.devtools
+
+HERE = osp.dirname(osp.abspath(__file__))
+
migrschema = None
def tearDownModule(*args):
global migrschema
del migrschema
if hasattr(MigrationCommandsTC, 'origschema'):
del MigrationCommandsTC.origschema
+ if hasattr(MigrationCommandsComputedTC, 'origschema'):
+ del MigrationCommandsComputedTC.origschema
-class MigrationCommandsTC(CubicWebTC):
+class MigrationTC(CubicWebTC):
configcls = cubicweb.devtools.TestServerConfiguration
tags = CubicWebTC.tags | Tags(('server', 'migration', 'migractions'))
def _init_repo(self):
- super(MigrationCommandsTC, self)._init_repo()
+ super(MigrationTC, self)._init_repo()
# we have to read schema from the database to get eid for schema entities
self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False)
# hack to read the schema from data/migrschema
config = self.config
- config.appid = join('data', 'migratedapp')
- config._apphome = self.datapath('migratedapp')
+ config.appid = osp.join(self.appid, 'migratedapp')
+ config._apphome = osp.join(HERE, config.appid)
global migrschema
migrschema = config.load_schema()
- config.appid = 'data'
- config._apphome = self.datadir
- assert 'Folder' in migrschema
+ config.appid = self.appid
+ config._apphome = osp.join(HERE, self.appid)
def setUp(self):
CubicWebTC.setUp(self)
@@ -73,6 +77,26 @@
repo=self.repo, cnx=cnx,
interactive=False)
+ def table_sql(self, mh, tablename):
+ result = mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' "
+ "and name=%(table)s", {'table': tablename})
+ if result:
+ return result[0][0]
+ return None # no such table
+
+ def table_schema(self, mh, tablename):
+ sql = self.table_sql(mh, tablename)
+ assert sql, 'no table %s' % tablename
+ return dict(x.split()[:2]
+ for x in sql.split('(', 1)[1].rsplit(')', 1)[0].split(','))
+
+
+class MigrationCommandsTC(MigrationTC):
+
+ def _init_repo(self):
+ super(MigrationCommandsTC, self)._init_repo()
+ assert 'Folder' in migrschema
+
def test_add_attribute_bool(self):
with self.mh() as (cnx, mh):
self.assertNotIn('yesno', self.schema)
@@ -135,8 +159,7 @@
self.assertEqual(self.schema['shortpara'].subjects(), ('Note', ))
self.assertEqual(self.schema['shortpara'].objects(), ('String', ))
# test created column is actually a varchar(64)
- notesql = mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' and name='%sNote'" % SQL_PREFIX)[0][0]
- fields = dict(x.strip().split()[:2] for x in notesql.split('(', 1)[1].rsplit(')', 1)[0].split(','))
+ fields = self.table_schema(mh, '%sNote' % SQL_PREFIX)
self.assertEqual(fields['%sshortpara' % SQL_PREFIX], 'varchar(64)')
# test default value set on existing entities
self.assertEqual(cnx.execute('Note X').get_entity(0, 0).shortpara, 'hop')
@@ -656,16 +679,167 @@
self.assertEqual(self.schema['Note'].specializes(), None)
self.assertEqual(self.schema['Text'].specializes(), None)
-
def test_add_symmetric_relation_type(self):
with self.mh() as (cnx, mh):
- same_as_sql = mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' "
- "and name='same_as_relation'")
- self.assertFalse(same_as_sql)
+ self.assertFalse(self.table_sql(mh, 'same_as_relation'))
mh.cmd_add_relation_type('same_as')
- same_as_sql = mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' "
- "and name='same_as_relation'")
- self.assertTrue(same_as_sql)
+ self.assertTrue(self.table_sql(mh, 'same_as_relation'))
+
+
+class MigrationCommandsComputedTC(MigrationTC):
+ """ Unit tests for computed relations and attributes
+ """
+ appid = 'datacomputed'
+
+ def setUp(self):
+ MigrationTC.setUp(self)
+ # ensure vregistry is reloaded, needed by generated hooks for computed
+ # attributes
+ self.repo.vreg.set_schema(self.repo.schema)
+
+ def test_computed_relation_add_relation_definition(self):
+ self.assertNotIn('works_for', self.schema)
+ with self.mh() as (cnx, mh):
+ with self.assertRaises(ExecutionError) as exc:
+ mh.cmd_add_relation_definition('Employee', 'works_for',
+ 'Company')
+ self.assertEqual(str(exc.exception),
+ 'Cannot add a relation definition for a computed '
+ 'relation (works_for)')
+
+ def test_computed_relation_drop_relation_definition(self):
+ self.assertIn('notes', self.schema)
+ with self.mh() as (cnx, mh):
+ with self.assertRaises(ExecutionError) as exc:
+ mh.cmd_drop_relation_definition('Company', 'notes', 'Note')
+ self.assertEqual(str(exc.exception),
+ 'Cannot drop a relation definition for a computed '
+ 'relation (notes)')
+
+ def test_computed_relation_add_relation_type(self):
+ self.assertNotIn('works_for', self.schema)
+ with self.mh() as (cnx, mh):
+ mh.cmd_add_relation_type('works_for')
+ self.assertIn('works_for', self.schema)
+ self.assertEqual(self.schema['works_for'].rule,
+ 'O employees S, NOT EXISTS (O associates S)')
+ self.assertEqual(self.schema['works_for'].objects(), ('Company',))
+ self.assertEqual(self.schema['works_for'].subjects(), ('Employee',))
+ self.assertFalse(self.table_sql(mh, 'works_for_relation'))
+ e = cnx.create_entity('Employee')
+ a = cnx.create_entity('Employee')
+ cnx.create_entity('Company', employees=e, associates=a)
+ cnx.commit()
+ company = cnx.execute('Company X').get_entity(0, 0)
+ self.assertEqual([e.eid],
+ [x.eid for x in company.reverse_works_for])
+ mh.rollback()
+
+ def test_computed_relation_drop_relation_type(self):
+ self.assertIn('notes', self.schema)
+ with self.mh() as (cnx, mh):
+ mh.cmd_drop_relation_type('notes')
+ self.assertNotIn('notes', self.schema)
+
+ def test_computed_relation_sync_schema_props_perms(self):
+ self.assertIn('whatever', self.schema)
+ with self.mh() as (cnx, mh):
+ mh.cmd_sync_schema_props_perms('whatever')
+ self.assertEqual(self.schema['whatever'].rule,
+ 'S employees E, O associates E')
+ self.assertEqual(self.schema['whatever'].objects(), ('Company',))
+ self.assertEqual(self.schema['whatever'].subjects(), ('Company',))
+ self.assertFalse(self.table_sql(mh, 'whatever_relation'))
+
+ def test_computed_relation_sync_schema_props_perms_on_rdef(self):
+ self.assertIn('whatever', self.schema)
+ with self.mh() as (cnx, mh):
+ with self.assertRaises(ExecutionError) as exc:
+ mh.cmd_sync_schema_props_perms(
+ ('Company', 'whatever', 'Person'))
+ self.assertEqual(str(exc.exception),
+ 'Cannot synchronize a relation definition for a computed '
+ 'relation (whatever)')
+
+ # computed attributes migration ############################################
+
+ def setup_add_score(self):
+ with self.admin_access.client_cnx() as cnx:
+ assert not cnx.execute('Company X')
+ c = cnx.create_entity('Company')
+ e1 = cnx.create_entity('Employee', reverse_employees=c)
+ n1 = cnx.create_entity('Note', note=2, concerns=e1)
+ e2 = cnx.create_entity('Employee', reverse_employees=c)
+ n2 = cnx.create_entity('Note', note=4, concerns=e2)
+ cnx.commit()
+
+ def assert_score_initialized(self, mh):
+ self.assertEqual(self.schema['score'].rdefs['Company', 'Float'].formula,
+ 'Any AVG(NN) WHERE X employees E, N concerns E, N note NN')
+ fields = self.table_schema(mh, '%sCompany' % SQL_PREFIX)
+ self.assertEqual(fields['%sscore' % SQL_PREFIX], 'float')
+ self.assertEqual([[3.0]],
+ mh.rqlexec('Any CS WHERE C score CS, C is Company').rows)
+
+ def test_computed_attribute_add_relation_type(self):
+ self.assertNotIn('score', self.schema)
+ self.setup_add_score()
+ with self.mh() as (cnx, mh):
+ mh.cmd_add_relation_type('score')
+ self.assertIn('score', self.schema)
+ self.assertEqual(self.schema['score'].objects(), ('Float',))
+ self.assertEqual(self.schema['score'].subjects(), ('Company',))
+ self.assert_score_initialized(mh)
+
+ def test_computed_attribute_add_attribute(self):
+ self.assertNotIn('score', self.schema)
+ self.setup_add_score()
+ with self.mh() as (cnx, mh):
+ mh.cmd_add_attribute('Company', 'score')
+ self.assertIn('score', self.schema)
+ self.assert_score_initialized(mh)
+
+ def assert_computed_attribute_dropped(self):
+ self.assertNotIn('note20', self.schema)
+ # DROP COLUMN not supported by sqlite
+ #with self.mh() as (cnx, mh):
+ # fields = self.table_schema(mh, '%sNote' % SQL_PREFIX)
+ #self.assertNotIn('%snote20' % SQL_PREFIX, fields)
+
+ def test_computed_attribute_drop_type(self):
+ self.assertIn('note20', self.schema)
+ with self.mh() as (cnx, mh):
+ mh.cmd_drop_relation_type('note20')
+ self.assert_computed_attribute_dropped()
+
+ def test_computed_attribute_drop_relation_definition(self):
+ self.assertIn('note20', self.schema)
+ with self.mh() as (cnx, mh):
+ mh.cmd_drop_relation_definition('Note', 'note20', 'Int')
+ self.assert_computed_attribute_dropped()
+
+ def test_computed_attribute_drop_attribute(self):
+ self.assertIn('note20', self.schema)
+ with self.mh() as (cnx, mh):
+ mh.cmd_drop_attribute('Note', 'note20')
+ self.assert_computed_attribute_dropped()
+
+ def test_computed_attribute_sync_schema_props_perms_rtype(self):
+ self.assertIn('note100', self.schema)
+ with self.mh() as (cnx, mh):
+ mh.cmd_sync_schema_props_perms('note100')
+ self.assertEqual(self.schema['note100'].rdefs['Note', 'Int'].formula,
+ 'Any N*100 WHERE X note N')
+
+ def test_computed_attribute_sync_schema_props_perms_rdef(self):
+ self.setup_add_score()
+ with self.mh() as (cnx, mh):
+ mh.cmd_sync_schema_props_perms(('Note', 'note100', 'Int'))
+ self.assertEqual([[200], [400]],
+ cnx.execute('Any N ORDERBY N WHERE X note100 N').rows)
+ self.assertEqual([[300]],
+ cnx.execute('Any CS WHERE C score100 CS, C is Company').rows)
+
if __name__ == '__main__':
unittest_main()
--- a/server/test/unittest_querier.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/test/unittest_querier.py Fri Oct 17 18:16:58 2014 +0200
@@ -173,11 +173,11 @@
'ET': 'CWEType', 'ETN': 'String'}])
rql, solutions = partrqls[1]
self.assertRQLEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, '
- 'X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, '
- ' CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, '
- ' CWRelation, CWSource, CWUniqueTogetherConstraint, CWUser, Card, Comment, '
- ' Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, Frozable, '
- ' Note, Old, Personne, RQLExpression, Societe, State, SubDivision, '
+ 'X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWComputedRType, '
+ ' CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, '
+ ' CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, CWUser, Card, '
+ ' Comment, Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, '
+ ' Frozable, Note, Old, Personne, RQLExpression, Societe, State, SubDivision, '
' SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)')
self.assertListEqual(sorted(solutions),
sorted([{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'},
@@ -186,6 +186,7 @@
{'X': 'Comment', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'Division', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWCache', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWComputedRType', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWConstraint', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWConstraintType', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWEType', 'ETN': 'String', 'ET': 'CWEType'},
@@ -603,18 +604,18 @@
'WHERE RT name N, RDEF relation_type RT '
'HAVING COUNT(RDEF) > 10')
self.assertListEqual(rset.rows,
- [[u'description_format', 12],
- [u'description', 13],
- [u'name', 18],
- [u'created_by', 44],
- [u'creation_date', 44],
- [u'cw_source', 44],
- [u'cwuri', 44],
- [u'in_basket', 44],
- [u'is', 44],
- [u'is_instance_of', 44],
- [u'modification_date', 44],
- [u'owned_by', 44]])
+ [[u'description_format', 13],
+ [u'description', 14],
+ [u'name', 19],
+ [u'created_by', 45],
+ [u'creation_date', 45],
+ [u'cw_source', 45],
+ [u'cwuri', 45],
+ [u'in_basket', 45],
+ [u'is', 45],
+ [u'is_instance_of', 45],
+ [u'modification_date', 45],
+ [u'owned_by', 45]])
def test_select_aggregat_having_dumb(self):
# dumb but should not raise an error
--- a/server/test/unittest_repository.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/test/unittest_repository.py Fri Oct 17 18:16:58 2014 +0200
@@ -280,7 +280,7 @@
self.assertListEqual(['relation_type',
'from_entity', 'to_entity',
'constrained_by',
- 'cardinality', 'ordernum',
+ 'cardinality', 'ordernum', 'formula',
'indexed', 'fulltextindexed', 'internationalizable',
'defaultval', 'extra_props',
'description', 'description_format'],
--- a/server/test/unittest_schemaserial.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/test/unittest_schemaserial.py Fri Oct 17 18:16:58 2014 +0200
@@ -25,6 +25,7 @@
from cubicweb import Binary
from cubicweb.schema import CubicWebSchemaLoader
from cubicweb.devtools import TestServerConfiguration
+from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.server.schemaserial import (updateeschema2rql, updaterschema2rql, rschema2rql,
eschema2rql, rdef2rql, specialize2rql,
@@ -221,7 +222,7 @@
'inlined': False}),
('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,'
- 'X description %(description)s,X fulltextindexed %(fulltextindexed)s,'
+ 'X description %(description)s,X formula %(formula)s,X fulltextindexed %(fulltextindexed)s,'
'X indexed %(indexed)s,X internationalizable %(internationalizable)s,'
'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,'
'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
@@ -234,6 +235,7 @@
'ordernum': 5,
'defaultval': None,
'indexed': False,
+ 'formula': None,
'cardinality': u'?1'}),
('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
'WHERE CT eid %(ct)s, EDEF eid %(x)s',
@@ -247,7 +249,7 @@
'value': u"u'?1', u'11'"}),
('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,'
- 'X description %(description)s,X fulltextindexed %(fulltextindexed)s,'
+ 'X description %(description)s,X formula %(formula)s,X fulltextindexed %(fulltextindexed)s,'
'X indexed %(indexed)s,X internationalizable %(internationalizable)s,'
'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE '
'WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
@@ -260,6 +262,7 @@
'ordernum': 5,
'defaultval': None,
'indexed': False,
+ 'formula': None,
'cardinality': u'?1'}),
('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
'WHERE CT eid %(ct)s, EDEF eid %(x)s',
@@ -272,7 +275,7 @@
'ct': u'StaticVocabularyConstraint_eid',
'value': (u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', "
"u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'")})],
- list(rschema2rql(schema.rschema('cardinality'), cstrtypemap)))
+ list(rschema2rql(schema.rschema('cardinality'), cstrtypemap)))
def test_rschema2rql_custom_type(self):
expected = [('INSERT CWRType X: X description %(description)s,X final %(final)s,'
@@ -286,13 +289,14 @@
'symmetric': False}),
('INSERT CWAttribute X: X cardinality %(cardinality)s,'
'X defaultval %(defaultval)s,X description %(description)s,'
- 'X extra_props %(extra_props)s,X indexed %(indexed)s,'
+ 'X extra_props %(extra_props)s,X formula %(formula)s,X indexed %(indexed)s,'
'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,'
'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
{'cardinality': u'?1',
'defaultval': None,
'description': u'',
'extra_props': '{"jungle_speed": 42}',
+ 'formula': None,
'indexed': False,
'oe': None,
'ordernum': 4,
@@ -312,7 +316,7 @@
def test_rdef2rql(self):
self.assertListEqual([
('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,'
- 'X description %(description)s,X fulltextindexed %(fulltextindexed)s,'
+ 'X description %(description)s,X formula %(formula)s,X fulltextindexed %(fulltextindexed)s,'
'X indexed %(indexed)s,X internationalizable %(internationalizable)s,'
'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,'
'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
@@ -325,6 +329,7 @@
'ordernum': 3,
'defaultval': Binary.zpickle(u'text/plain'),
'indexed': False,
+ 'formula': None,
'cardinality': u'?1'}),
('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
'WHERE CT eid %(ct)s, EDEF eid %(x)s',
@@ -424,7 +429,19 @@
# self.assertListEqual(perms2rql(schema, self.GROUP_MAPPING),
# ['INSERT CWEType X: X name 'Societe', X final FALSE'])
+class ComputedAttributeAndRelationTC(CubicWebTC):
+ appid = 'data-cwep002'
+ def test(self):
+ # force to read schema from the database
+ self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False)
+ schema = self.repo.schema
+ self.assertEqual([('Company', 'Person')], list(schema['has_employee'].rdefs))
+ self.assertEqual('O works_for S',
+ schema['has_employee'].rule)
+ self.assertEqual([('Company', 'Int')], list(schema['total_salary'].rdefs))
+ self.assertEqual('Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA',
+ schema['total_salary'].rdefs['Company', 'Int'].formula)
if __name__ == '__main__':
unittest_main()
--- a/server/test/unittest_security.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/test/unittest_security.py Fri Oct 17 18:16:58 2014 +0200
@@ -22,7 +22,7 @@
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb import Unauthorized, ValidationError, QueryError, Binary
from cubicweb.schema import ERQLExpression
-from cubicweb.server.querier import check_read_access
+from cubicweb.server.querier import get_local_checks, check_relations_read_access
from cubicweb.server.utils import _CRYPTO_CTX
@@ -37,18 +37,33 @@
class LowLevelSecurityFunctionTC(BaseSecurityTC):
- def test_check_read_access(self):
- rql = u'Personne U where U nom "managers"'
+ def test_check_relation_read_access(self):
+ rql = u'Personne U WHERE U nom "managers"'
+ rqlst = self.repo.vreg.rqlhelper.parse(rql).children[0]
+ nom = self.repo.schema['Personne'].rdef('nom')
+ with self.temporary_permissions((nom, {'read': ('users', 'managers')})):
+ with self.admin_access.repo_cnx() as cnx:
+ self.repo.vreg.solutions(cnx, rqlst, None)
+ check_relations_read_access(cnx, rqlst, {})
+ with self.new_access('anon').repo_cnx() as cnx:
+ self.assertRaises(Unauthorized,
+ check_relations_read_access,
+ cnx, rqlst, {})
+ self.assertRaises(Unauthorized, cnx.execute, rql)
+
+ def test_get_local_checks(self):
+ rql = u'Personne U WHERE U nom "managers"'
rqlst = self.repo.vreg.rqlhelper.parse(rql).children[0]
with self.temporary_permissions(Personne={'read': ('users', 'managers')}):
with self.admin_access.repo_cnx() as cnx:
self.repo.vreg.solutions(cnx, rqlst, None)
solution = rqlst.solutions[0]
- check_read_access(cnx, rqlst, solution, {})
+ localchecks = get_local_checks(cnx, rqlst, solution)
+ self.assertEqual({}, localchecks)
with self.new_access('anon').repo_cnx() as cnx:
self.assertRaises(Unauthorized,
- check_read_access,
- cnx, rqlst, solution, {})
+ get_local_checks,
+ cnx, rqlst, solution)
self.assertRaises(Unauthorized, cnx.execute, rql)
def test_upassword_not_selectable(self):
--- a/server/test/unittest_undo.py Tue Oct 07 10:06:24 2014 +0200
+++ b/server/test/unittest_undo.py Fri Oct 17 18:16:58 2014 +0200
@@ -106,13 +106,20 @@
self.assertEqual(a4.eid_from, self.totoeid)
self.assertEqual(a4.eid_to, self.toto(cnx).in_group[0].eid)
self.assertEqual(a4.order, 4)
- for i, rtype in ((1, 'owned_by'), (2, 'owned_by'),
- (4, 'in_state'), (5, 'created_by')):
+ for i, rtype in ((1, 'owned_by'), (2, 'owned_by')):
a = actions[i]
self.assertEqual(a.action, 'A')
self.assertEqual(a.eid_from, self.totoeid)
self.assertEqual(a.rtype, rtype)
self.assertEqual(a.order, i+1)
+ self.assertEqual(set((actions[4].rtype, actions[5].rtype)),
+ set(('in_state', 'created_by')))
+ for i in (4, 5):
+ a = actions[i]
+ self.assertEqual(a.action, 'A')
+ self.assertEqual(a.eid_from, self.totoeid)
+ self.assertEqual(a.order, i+1)
+
# test undoable_transactions
txs = cnx.undoable_transactions()
self.assertEqual(len(txs), 1)
--- a/sobjects/notification.py Tue Oct 07 10:06:24 2014 +0200
+++ b/sobjects/notification.py Fri Oct 17 18:16:58 2014 +0200
@@ -80,15 +80,8 @@
# this is usually the method to call
def render_and_send(self, **kwargs):
- """generate and send an email message for this view"""
- delayed = kwargs.pop('delay_to_commit', None)
- for recipients, msg in self.render_emails(**kwargs):
- if delayed is None:
- self.send(recipients, msg)
- elif delayed:
- self.send_on_commit(recipients, msg)
- else:
- self.send_now(recipients, msg)
+ """generate and send email messages for this view"""
+ self._cw.vreg.config.sendmails(self.render_emails(**kwargs))
def cell_call(self, row, col=0, **kwargs):
self.w(self._cw._(self.content) % self.context(**kwargs))
@@ -146,16 +139,11 @@
continue
msg = format_mail(self.user_data, [emailaddr], content, subject,
config=self._cw.vreg.config, msgid=msgid, references=refs)
- yield [emailaddr], msg
+ yield msg, [emailaddr]
finally:
- # ensure we have a cnxset since commit will fail if there is
- # some operation but no cnxset. This may occurs in this very
- # specific case (eg SendMailOp)
- with cnx.ensure_cnx_set:
- cnx.commit()
self._cw = req
- # recipients / email sending ###############################################
+ # recipients handling ######################################################
def recipients(self):
"""return a list of either 2-uple (email, language) or user entity to
@@ -166,13 +154,6 @@
row=self.cw_row or 0, col=self.cw_col or 0)
return finder.recipients()
- def send_now(self, recipients, msg):
- self._cw.vreg.config.sendmails([(msg, recipients)])
-
- def send_on_commit(self, recipients, msg):
- SendMailOp(self._cw, recipients=recipients, msg=msg)
- send = send_on_commit
-
# email generation helpers #################################################
def construct_message_id(self, eid):
--- a/test/data/rewrite/schema.py Tue Oct 07 10:06:24 2014 +0200
+++ b/test/data/rewrite/schema.py Fri Oct 17 18:16:58 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -15,9 +15,15 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-from yams.buildobjs import EntityType, RelationDefinition, String, SubjectRelation
+from yams.buildobjs import (EntityType, RelationDefinition, String, SubjectRelation,
+ ComputedRelation, Int)
from cubicweb.schema import ERQLExpression
+
+class Person(EntityType):
+ name = String()
+
+
class Affaire(EntityType):
__permissions__ = {
'read': ('managers',
@@ -82,3 +88,37 @@
object = 'CWUser'
inlined = True
cardinality = '1*'
+
+class Contribution(EntityType):
+ code = Int()
+
+class ArtWork(EntityType):
+ name = String()
+
+class Role(EntityType):
+ name = String()
+
+class contributor(RelationDefinition):
+ subject = 'Contribution'
+ object = 'Person'
+ cardinality = '1*'
+ inlined = True
+
+class manifestation(RelationDefinition):
+ subject = 'Contribution'
+ object = 'ArtWork'
+
+class role(RelationDefinition):
+ subject = 'Contribution'
+ object = 'Role'
+
+class illustrator_of(ComputedRelation):
+ rule = ('C is Contribution, C contributor S, C manifestation O, '
+ 'C role R, R name "illustrator"')
+
+class participated_in(ComputedRelation):
+ rule = 'S contributor O'
+
+class match(RelationDefinition):
+ subject = 'ArtWork'
+ object = 'Note'
--- a/test/unittest_dataimport.py Tue Oct 07 10:06:24 2014 +0200
+++ b/test/unittest_dataimport.py Fri Oct 17 18:16:58 2014 +0200
@@ -1,6 +1,88 @@
+# -*- coding: utf-8 -*-
+import datetime as DT
from StringIO import StringIO
from logilab.common.testlib import TestCase, unittest_main
from cubicweb import dataimport
+from cubicweb.devtools.testlib import CubicWebTC
+
+
+class RQLObjectStoreTC(CubicWebTC):
+
+ def test_all(self):
+ with self.admin_access.repo_cnx() as cnx:
+ store = dataimport.RQLObjectStore(cnx)
+ group_eid = store.create_entity('CWGroup', name=u'grp').eid
+ user_eid = store.create_entity('CWUser', login=u'lgn', upassword=u'pwd').eid
+ store.relate(user_eid, 'in_group', group_eid)
+ cnx.commit()
+
+ with self.admin_access.repo_cnx() as cnx:
+ users = cnx.execute('CWUser X WHERE X login "lgn"')
+ self.assertEqual(1, len(users))
+ self.assertEqual(user_eid, users.one().eid)
+ groups = cnx.execute('CWGroup X WHERE U in_group X, U login "lgn"')
+ self.assertEqual(1, len(users))
+ self.assertEqual(group_eid, groups.one().eid)
+
+class CreateCopyFromBufferTC(TestCase):
+
+ # test converters
+
+ def test_convert_none(self):
+ cnvt = dataimport._copyfrom_buffer_convert_None
+ self.assertEqual('NULL', cnvt(None))
+
+ def test_convert_number(self):
+ cnvt = dataimport._copyfrom_buffer_convert_number
+ self.assertEqual('42', cnvt(42))
+ self.assertEqual('42', cnvt(42L))
+ self.assertEqual('42.42', cnvt(42.42))
+
+ def test_convert_string(self):
+ cnvt = dataimport._copyfrom_buffer_convert_string
+ # simple
+ self.assertEqual('babar', cnvt('babar'))
+ # unicode
+ self.assertEqual('\xc3\xa9l\xc3\xa9phant', cnvt(u'éléphant'))
+ self.assertEqual('\xe9l\xe9phant', cnvt(u'éléphant', encoding='latin1'))
+ self.assertEqual('babar#', cnvt('babar\t', replace_sep='#'))
+ self.assertRaises(ValueError, cnvt, 'babar\t')
+
+ def test_convert_date(self):
+ cnvt = dataimport._copyfrom_buffer_convert_date
+ self.assertEqual('0666-01-13', cnvt(DT.date(666, 1, 13)))
+
+ def test_convert_time(self):
+ cnvt = dataimport._copyfrom_buffer_convert_time
+ self.assertEqual('06:06:06.000100', cnvt(DT.time(6, 6, 6, 100)))
+
+ def test_convert_datetime(self):
+ cnvt = dataimport._copyfrom_buffer_convert_datetime
+ self.assertEqual('0666-06-13 06:06:06.000000', cnvt(DT.datetime(666, 6, 13, 6, 6, 6)))
+
+ # test buffer
+ def test_create_copyfrom_buffer_tuple(self):
+ cnvt = dataimport._create_copyfrom_buffer
+ data = ((42, 42L, 42.42, u'éléphant', DT.date(666, 1, 13), DT.time(6, 6, 6), DT.datetime(666, 6, 13, 6, 6, 6)),
+ (6, 6L, 6.6, u'babar', DT.date(2014, 1, 14), DT.time(4, 2, 1), DT.datetime(2014, 1, 1, 0, 0, 0)))
+ results = dataimport._create_copyfrom_buffer(data)
+ # all columns
+ expected = '''42\t42\t42.42\téléphant\t0666-01-13\t06:06:06.000000\t0666-06-13 06:06:06.000000
+6\t6\t6.6\tbabar\t2014-01-14\t04:02:01.000000\t2014-01-01 00:00:00.000000'''
+ self.assertMultiLineEqual(expected, results.getvalue())
+ # selected columns
+ results = dataimport._create_copyfrom_buffer(data, columns=(1, 3, 6))
+ expected = '''42\téléphant\t0666-06-13 06:06:06.000000
+6\tbabar\t2014-01-01 00:00:00.000000'''
+ self.assertMultiLineEqual(expected, results.getvalue())
+
+ def test_create_copyfrom_buffer_dict(self):
+ cnvt = dataimport._create_copyfrom_buffer
+ data = (dict(integer=42, double=42.42, text=u'éléphant', date=DT.datetime(666, 6, 13, 6, 6, 6)),
+ dict(integer=6, double=6.6, text=u'babar', date=DT.datetime(2014, 1, 1, 0, 0, 0)))
+ results = dataimport._create_copyfrom_buffer(data, ('integer', 'text'))
+ expected = '''42\téléphant\n6\tbabar'''
+ self.assertMultiLineEqual(expected, results.getvalue())
class UcsvreaderTC(TestCase):
--- a/test/unittest_dbapi.py Tue Oct 07 10:06:24 2014 +0200
+++ b/test/unittest_dbapi.py Fri Oct 17 18:16:58 2014 +0200
@@ -78,7 +78,7 @@
with tempattr(cnx.vreg, 'config', config):
cnx.use_web_compatible_requests('http://perdu.com')
req = cnx.request()
- self.assertEqual(req.base_url(), 'http://perdu.com')
+ self.assertEqual(req.base_url(), 'http://perdu.com/')
self.assertEqual(req.from_controller(), 'view')
self.assertEqual(req.relative_path(), '')
req.ajax_replace_url('domid') # don't crash
--- a/test/unittest_rqlrewrite.py Tue Oct 07 10:06:24 2014 +0200
+++ b/test/unittest_rqlrewrite.py Fri Oct 17 18:16:58 2014 +0200
@@ -19,6 +19,7 @@
from logilab.common.testlib import unittest_main, TestCase
from logilab.common.testlib import mock_object
from yams import BadSchemaDefinition
+from yams.buildobjs import RelationDefinition
from rql import parse, nodes, RQLHelper
from cubicweb import Unauthorized, rqlrewrite
@@ -31,10 +32,8 @@
config = TestServerConfiguration(RQLRewriteTC.datapath('rewrite'))
config.bootstrap_cubes()
schema = config.load_schema()
- from yams.buildobjs import RelationDefinition
schema.add_relation_def(RelationDefinition(subject='Card', name='in_state',
object='State', cardinality='1*'))
-
rqlhelper = RQLHelper(schema, special_relations={'eid': 'uid',
'has_text': 'fti'})
repotest.do_monkey_patch()
@@ -49,11 +48,11 @@
2: 'Card',
3: 'Affaire'}[eid]
-def rewrite(rqlst, snippets_map, kwargs, existingvars=None):
+def _prepare_rewriter(rewriter_cls, kwargs):
class FakeVReg:
schema = schema
@staticmethod
- def solutions(sqlcursor, mainrqlst, kwargs):
+ def solutions(sqlcursor, rqlst, kwargs):
rqlhelper.compute_solutions(rqlst, {'eid': eid_func_map}, kwargs=kwargs)
class rqlhelper:
@staticmethod
@@ -62,8 +61,10 @@
@staticmethod
def simplify(mainrqlst, needcopy=False):
rqlhelper.simplify(rqlst, needcopy)
- rewriter = rqlrewrite.RQLRewriter(
- mock_object(vreg=FakeVReg, user=(mock_object(eid=1))))
+ return rewriter_cls(mock_object(vreg=FakeVReg, user=(mock_object(eid=1))))
+
+def rewrite(rqlst, snippets_map, kwargs, existingvars=None):
+ rewriter = _prepare_rewriter(rqlrewrite.RQLRewriter, kwargs)
snippets = []
for v, exprs in sorted(snippets_map.items()):
rqlexprs = [isinstance(snippet, basestring)
@@ -87,7 +88,7 @@
except KeyError:
vrefmaps[stmt] = {vref.name: set( (vref,) )}
selects.append(stmt)
- assert node in selects
+ assert node in selects, (node, selects)
for stmt in selects:
for var in stmt.defined_vars.itervalues():
assert var.stinfo['references']
@@ -591,5 +592,223 @@
finally:
RQLRewriter.insert_snippets = orig_insert_snippets
+
+class RQLRelationRewriterTC(TestCase):
+ # XXX valid rules: S and O specified, not in a SET, INSERT, DELETE scope
+ # valid uses: no outer join
+
+ # Basic tests
+ def test_base_rule(self):
+ rules = {'participated_in': 'S contributor O'}
+ rqlst = rqlhelper.parse('Any X WHERE X participated_in S')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any X WHERE X contributor S',
+ rqlst.as_string())
+
+ def test_complex_rule_1(self):
+ rules = {'illustrator_of': ('C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, '
+ 'R name "illustrator"')}
+ rqlst = rqlhelper.parse('Any A,B WHERE A illustrator_of B')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WHERE C is Contribution, '
+ 'C contributor A, C manifestation B, '
+ 'C role D, D name "illustrator"',
+ rqlst.as_string())
+
+ def test_complex_rule_2(self):
+ rules = {'illustrator_of': ('C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, '
+ 'R name "illustrator"')}
+ rqlst = rqlhelper.parse('Any A WHERE EXISTS(A illustrator_of B)')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A WHERE EXISTS(C is Contribution, '
+ 'C contributor A, C manifestation B, '
+ 'C role D, D name "illustrator")',
+ rqlst.as_string())
+
+
+ def test_rewrite2(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('Any A,B WHERE A illustrator_of B, C require_permission R, S '
+ 'require_state O')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WHERE C require_permission R, S require_state O, '
+ 'D is Contribution, D contributor A, D manifestation B, D role E, '
+ 'E name "illustrator"',
+ rqlst.as_string())
+
+ def test_rewrite3(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('Any A,B WHERE E require_permission T, A illustrator_of B')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WHERE E require_permission T, '
+ 'C is Contribution, C contributor A, C manifestation B, '
+ 'C role D, D name "illustrator"',
+ rqlst.as_string())
+
+ def test_rewrite4(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('Any A,B WHERE C require_permission R, A illustrator_of B')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WHERE C require_permission R, '
+ 'D is Contribution, D contributor A, D manifestation B, '
+ 'D role E, E name "illustrator"',
+ rqlst.as_string())
+
+ def test_rewrite5(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('Any A,B WHERE C require_permission R, A illustrator_of B, '
+ 'S require_state O')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WHERE C require_permission R, S require_state O, '
+ 'D is Contribution, D contributor A, D manifestation B, D role E, '
+ 'E name "illustrator"',
+ rqlst.as_string())
+
+ # Tests for the with clause
+ def test_rewrite_with(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('Any A,B WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WITH A,B BEING '
+ '(Any X,Y WHERE A is Contribution, A contributor X, '
+ 'A manifestation Y, A role B, B name "illustrator")',
+ rqlst.as_string())
+
+ def test_rewrite_with2(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('Any A,B WHERE T require_permission C WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WHERE T require_permission C '
+ 'WITH A,B BEING (Any X,Y WHERE A is Contribution, '
+ 'A contributor X, A manifestation Y, A role B, B name "illustrator")',
+ rqlst.as_string())
+
+ def test_rewrite_with3(self):
+ rules = {'participated_in': 'S contributor O'}
+ rqlst = rqlhelper.parse('Any A,B WHERE A participated_in B '
+ 'WITH A, B BEING(Any X,Y WHERE X contributor Y)')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WHERE A contributor B WITH A,B BEING '
+ '(Any X,Y WHERE X contributor Y)',
+ rqlst.as_string())
+
+ def test_rewrite_with4(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('Any A,B WHERE A illustrator_of B '
+ 'WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WHERE C is Contribution, '
+ 'C contributor A, C manifestation B, C role D, '
+ 'D name "illustrator" WITH A,B BEING '
+ '(Any X,Y WHERE A is Contribution, A contributor X, '
+ 'A manifestation Y, A role B, B name "illustrator")',
+ rqlst.as_string())
+
+ # Tests for the union
+ def test_rewrite_union(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('(Any A,B WHERE A illustrator_of B) UNION'
+ '(Any X,Y WHERE X is CWUser, Z manifestation Y)')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('(Any A,B WHERE C is Contribution, '
+ 'C contributor A, C manifestation B, C role D, '
+ 'D name "illustrator") UNION (Any X,Y WHERE X is CWUser, Z manifestation Y)',
+ rqlst.as_string())
+
+ def test_rewrite_union2(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('(Any Y WHERE Y match W) UNION '
+ '(Any A WHERE A illustrator_of B) UNION '
+ '(Any Y WHERE Y is ArtWork)')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('(Any Y WHERE Y match W) '
+ 'UNION (Any A WHERE C is Contribution, C contributor A, '
+ 'C manifestation B, C role D, D name "illustrator") '
+ 'UNION (Any Y WHERE Y is ArtWork)',
+ rqlst.as_string())
+
+ # Tests for the exists clause
+ def test_rewrite_exists(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('(Any A,B WHERE A illustrator_of B, '
+ 'EXISTS(B is ArtWork))')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WHERE EXISTS(B is ArtWork), '
+ 'C is Contribution, C contributor A, C manifestation B, C role D, '
+ 'D name "illustrator"',
+ rqlst.as_string())
+
+ def test_rewrite_exists2(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('(Any A,B WHERE B contributor A, EXISTS(A illustrator_of W))')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WHERE B contributor A, '
+ 'EXISTS(C is Contribution, C contributor A, C manifestation W, '
+ 'C role D, D name "illustrator")',
+ rqlst.as_string())
+
+ def test_rewrite_exists3(self):
+ rules = {'illustrator_of': 'C is Contribution, C contributor S, '
+ 'C manifestation O, C role R, R name "illustrator"'}
+ rqlst = rqlhelper.parse('(Any A,B WHERE A illustrator_of B, EXISTS(A illustrator_of W))')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any A,B WHERE EXISTS(C is Contribution, C contributor A, '
+ 'C manifestation W, C role D, D name "illustrator"), '
+ 'E is Contribution, E contributor A, E manifestation B, E role F, '
+ 'F name "illustrator"',
+ rqlst.as_string())
+
+ # Test for GROUPBY
+ def test_rewrite_groupby(self):
+ rules = {'participated_in': 'S contributor O'}
+ rqlst = rqlhelper.parse('Any SUM(SA) GROUPBY S WHERE P participated_in S, P manifestation SA')
+ rule_rewrite(rqlst, rules)
+ self.assertEqual('Any SUM(SA) GROUPBY S WHERE P manifestation SA, P contributor S',
+ rqlst.as_string())
+
+
+class RQLRelationRewriterFunctionalTC(CubicWebTC):
+
+ appid = 'data/rewrite'
+
+ def test_base_rule(self):
+ with self.admin_access.client_cnx() as cnx:
+ art = cnx.create_entity('ArtWork', name=u'Les travailleurs de la Mer')
+ role = cnx.create_entity('Role', name=u'illustrator')
+ vic = cnx.create_entity('Person', name=u'Victor Hugo')
+ contrib = cnx.create_entity('Contribution', code=96, contributor=vic,
+ manifestation=art, role=role)
+ rset = cnx.execute('Any X WHERE X illustrator_of S')
+ self.assertEqual([u'Victor Hugo'],
+ [result.name for result in rset.entities()])
+ rset = cnx.execute('Any S WHERE X illustrator_of S, X eid %(x)s',
+ {'x': vic.eid})
+ self.assertEqual([u'Les travailleurs de la Mer'],
+ [result.name for result in rset.entities()])
+
+
+def rule_rewrite(rqlst, kwargs=None):
+ rewriter = _prepare_rewriter(rqlrewrite.RQLRelationRewriter, kwargs)
+ rqlhelper.compute_solutions(rqlst.children[0], {'eid': eid_func_map},
+ kwargs=kwargs)
+ rewriter.rewrite(rqlst)
+ for select in rqlst.children:
+ test_vrefs(select)
+ return rewriter.rewritten
+
+
if __name__ == '__main__':
unittest_main()
--- a/test/unittest_schema.py Tue Oct 07 10:06:24 2014 +0200
+++ b/test/unittest_schema.py Fri Oct 17 18:16:58 2014 +0200
@@ -26,14 +26,16 @@
from yams import ValidationError, BadSchemaDefinition
from yams.constraints import SizeConstraint, StaticVocabularyConstraint
-from yams.buildobjs import RelationDefinition, EntityType, RelationType
+from yams.buildobjs import (RelationDefinition, EntityType, RelationType,
+ Int, String, SubjectRelation, ComputedRelation)
from yams.reader import fill_schema
from cubicweb.schema import (
CubicWebSchema, CubicWebEntitySchema, CubicWebSchemaLoader,
RQLConstraint, RQLUniqueConstraint, RQLVocabularyConstraint,
RQLExpression, ERQLExpression, RRQLExpression,
- normalize_expression, order_eschemas, guess_rrqlexpr_mainvars)
+ normalize_expression, order_eschemas, guess_rrqlexpr_mainvars,
+ build_schema_from_namespace)
from cubicweb.devtools import TestServerConfiguration as TestConfiguration
from cubicweb.devtools.testlib import CubicWebTC
@@ -161,9 +163,10 @@
entities = sorted([str(e) for e in schema.entities()])
expected_entities = ['Ami', 'BaseTransition', 'BigInt', 'Bookmark', 'Boolean', 'Bytes', 'Card',
'Date', 'Datetime', 'Decimal',
- 'CWCache', 'CWConstraint', 'CWConstraintType', 'CWDataImport',
- 'CWEType', 'CWAttribute', 'CWGroup', 'EmailAddress', 'CWRelation',
- 'CWPermission', 'CWProperty', 'CWRType',
+ 'CWCache', 'CWComputedRType', 'CWConstraint',
+ 'CWConstraintType', 'CWDataImport', 'CWEType',
+ 'CWAttribute', 'CWGroup', 'EmailAddress',
+ 'CWRelation', 'CWPermission', 'CWProperty', 'CWRType',
'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig',
'CWUniqueTogetherConstraint', 'CWUser',
'ExternalUri', 'File', 'Float', 'Int', 'Interval', 'Note',
@@ -190,7 +193,7 @@
'ean', 'ecrit_par', 'eid', 'end_timestamp', 'evaluee', 'expression', 'exprtype', 'extra_props',
- 'fabrique_par', 'final', 'firstname', 'for_user', 'fournit',
+ 'fabrique_par', 'final', 'firstname', 'for_user', 'formula', 'fournit',
'from_entity', 'from_state', 'fulltext_container', 'fulltextindexed',
'has_group_permission', 'has_text',
@@ -207,7 +210,7 @@
'parser', 'path', 'pkey', 'prefered_form', 'prenom', 'primary_email',
- 'read_permission', 'relation_type', 'relations', 'require_group',
+ 'read_permission', 'relation_type', 'relations', 'require_group', 'rule',
'specializes', 'start_timestamp', 'state_of', 'status', 'subworkflow', 'subworkflow_exit', 'subworkflow_state', 'surname', 'symmetric', 'synopsis',
@@ -281,6 +284,88 @@
'add': ('managers',),
'delete': ('managers',)})
+ def test_computed_attribute(self):
+ """Check schema finalization for computed attributes."""
+ class Person(EntityType):
+ salary = Int()
+
+ class works_for(RelationDefinition):
+ subject = 'Person'
+ object = 'Company'
+ cardinality = '?*'
+
+ class Company(EntityType):
+ total_salary = Int(formula='Any SUM(SA) GROUPBY X WHERE '
+ 'P works_for X, P salary SA')
+ good_schema = build_schema_from_namespace(vars().items())
+
+ class Company(EntityType):
+ total_salary = String(formula='Any SUM(SA) GROUPBY X WHERE '
+ 'P works_for X, P salary SA')
+
+ with self.assertRaises(BadSchemaDefinition) as exc:
+ bad_schema = build_schema_from_namespace(vars().items())
+
+ self.assertEqual(str(exc.exception),
+ 'computed attribute total_salary on Company: '
+ 'computed attribute type (Int) mismatch with '
+ 'specified type (String)')
+
+
+class SchemaReaderComputedRelationAndAttributesTest(TestCase):
+
+ def test_infer_computed_relation(self):
+ class Person(EntityType):
+ name = String()
+
+ class Company(EntityType):
+ name = String()
+
+ class Service(EntityType):
+ name = String()
+
+ class works_for(RelationDefinition):
+ subject = 'Person'
+ object = 'Company'
+
+ class produce(RelationDefinition):
+ subject = ('Person', 'Company')
+ object = 'Service'
+
+ class achete(RelationDefinition):
+ subject = 'Person'
+ object = 'Service'
+
+ class produces_and_buys(ComputedRelation):
+ rule = 'S produce O, S achete O'
+
+ class produces_and_buys2(ComputedRelation):
+ rule = 'S works_for SO, SO produce O'
+
+ class reproduce(ComputedRelation):
+ rule = 'S produce O'
+
+ schema = build_schema_from_namespace(vars().items())
+
+ # check object/subject type
+ self.assertEqual([('Person','Service')],
+ schema['produces_and_buys'].rdefs.keys())
+ self.assertEqual([('Person','Service')],
+ schema['produces_and_buys2'].rdefs.keys())
+ self.assertEqual([('Company', 'Service'), ('Person', 'Service')],
+ schema['reproduce'].rdefs.keys())
+ # check relations are marked inferred
+ self.assertTrue(
+ schema['produces_and_buys'].rdefs[('Person','Service')].infered)
+
+ del schema
+ class autoname(ComputedRelation):
+ rule = 'S produce X, X name O'
+
+ with self.assertRaises(BadSchemaDefinition) as cm:
+ build_schema_from_namespace(vars().items())
+ self.assertEqual(str(cm.exception), 'computed relations cannot be final')
+
class BadSchemaTC(TestCase):
def setUp(self):
@@ -395,6 +480,7 @@
('cw_source', 'Bookmark', 'CWSource', 'object'),
('cw_source', 'CWAttribute', 'CWSource', 'object'),
('cw_source', 'CWCache', 'CWSource', 'object'),
+ ('cw_source', 'CWComputedRType', 'CWSource', 'object'),
('cw_source', 'CWConstraint', 'CWSource', 'object'),
('cw_source', 'CWConstraintType', 'CWSource', 'object'),
('cw_source', 'CWDataImport', 'CWSource', 'object'),
@@ -454,5 +540,6 @@
sorted([(r.rtype.type, r.subject.type, r.object.type, role)
for r, role in sorted(schema[etype].composite_rdef_roles)])
+
if __name__ == '__main__':
unittest_main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/unittest_toolsutils.py Fri Oct 17 18:16:58 2014 +0200
@@ -0,0 +1,57 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.toolsutils import RQLExecuteMatcher
+
+
+class RQLExecuteMatcherTests(TestCase):
+ def matched_query(self, text):
+ match = RQLExecuteMatcher.match(text)
+ if match is None:
+ return None
+ return match['rql_query']
+
+ def test_unknown_function_dont_match(self):
+ self.assertIsNone(self.matched_query('foo'))
+ self.assertIsNone(self.matched_query('rql('))
+ self.assertIsNone(self.matched_query('hell("")'))
+ self.assertIsNone(self.matched_query('eval("rql(\'bla\''))
+
+ def test_rql_other_parameters_dont_match(self):
+ self.assertIsNone(self.matched_query('rql("Any X WHERE X eid %(x)s")'))
+ self.assertIsNone(self.matched_query('rql("Any X WHERE X eid %(x)s", {'))
+ self.assertIsNone(self.matched_query('session.execute("Any X WHERE X eid %(x)s")'))
+ self.assertIsNone(self.matched_query('session.execute("Any X WHERE X eid %(x)s", {'))
+
+ def test_rql_function_match(self):
+ for func_expr in ('rql', 'session.execute'):
+ query = self.matched_query('%s("Any X WHERE X is ' % func_expr)
+ self.assertEqual(query, 'Any X WHERE X is ')
+
+ def test_offseted_rql_function_match(self):
+ """check indentation is allowed"""
+ for func_expr in (' rql', ' session.execute'):
+ query = self.matched_query('%s("Any X WHERE X is ' % func_expr)
+ self.assertEqual(query, 'Any X WHERE X is ')
+
+
+if __name__ == '__main__':
+ unittest_main()
--- a/toolsutils.py Tue Oct 07 10:06:24 2014 +0200
+++ b/toolsutils.py Fri Oct 17 18:16:58 2014 +0200
@@ -25,7 +25,12 @@
import subprocess
from os import listdir, makedirs, environ, chmod, walk, remove
from os.path import exists, join, abspath, normpath
-
+import re
+from rlcompleter import Completer
+try:
+ import readline
+except ImportError: # readline not available, no completion
+ pass
try:
from os import symlink
except ImportError:
@@ -263,3 +268,155 @@
password = getpass('password: ')
return connect(login=user, password=password, host=optconfig.host, database=appid)
+
+## cwshell helpers #############################################################
+
+class AbstractMatcher(object):
+ """Abstract class for CWShellCompleter's matchers.
+
+ A matcher should implement a ``possible_matches`` method. This
+ method has to return the list of possible completions for user's input.
+ Because of the python / readline interaction, each completion should
+ be a superset of the user's input.
+
+ NOTE: readline tokenizes user's input and only passes last token to
+ completers.
+ """
+
+ def possible_matches(self, text):
+ """return possible completions for user's input.
+
+ Parameters:
+ text: the user's input
+
+ Return:
+ a list of completions. Each completion includes the original input.
+ """
+ raise NotImplementedError()
+
+
+class RQLExecuteMatcher(AbstractMatcher):
+ """Custom matcher for rql queries.
+
+ If user's input starts with ``rql(`` or ``session.execute(`` and
+ the corresponding rql query is incomplete, suggest some valid completions.
+ """
+ query_match_rgx = re.compile(
+ r'(?P<func_prefix>\s*(?:rql)' # match rql, possibly indented
+ r'|' # or
+ r'\s*(?:\w+\.execute))' # match .execute, possibly indented
+ # end of <func_prefix>
+ r'\(' # followed by a parenthesis
+ r'(?P<quote_delim>["\'])' # a quote or double quote
+ r'(?P<parameters>.*)') # and some content
+
+ def __init__(self, local_ctx, req):
+ self.local_ctx = local_ctx
+ self.req = req
+ self.schema = req.vreg.schema
+ self.rsb = req.vreg['components'].select('rql.suggestions', req)
+
+ @staticmethod
+ def match(text):
+ """check if ``text`` looks like a call to ``rql`` or ``session.execute``
+
+ Parameters:
+ text: the user's input
+
+ Returns:
+ None if it doesn't match, the query structure otherwise.
+ """
+ query_match = RQLExecuteMatcher.query_match_rgx.match(text)
+ if query_match is None:
+ return None
+ parameters_text = query_match.group('parameters')
+ quote_delim = query_match.group('quote_delim')
+ # first parameter is fully specified, no completion needed
+ if re.match(r"(.*?)%s" % quote_delim, parameters_text) is not None:
+ return None
+ func_prefix = query_match.group('func_prefix')
+ return {
+ # user's input
+ 'text': text,
+ # rql( or session.execute(
+ 'func_prefix': func_prefix,
+ # offset of rql query
+ 'rql_offset': len(func_prefix) + 2,
+ # incomplete rql query
+ 'rql_query': parameters_text,
+ }
+
+ def possible_matches(self, text):
+ """call ``rql.suggestions`` component to complete user's input.
+ """
+ # readline will only send last token, but we need the entire user's input
+ user_input = readline.get_line_buffer()
+ query_struct = self.match(user_input)
+ if query_struct is None:
+ return []
+ else:
+ # we must only send completions of the last token => compute where it
+ # starts relatively to the rql query itself.
+ completion_offset = readline.get_begidx() - query_struct['rql_offset']
+ rql_query = query_struct['rql_query']
+ return [suggestion[completion_offset:]
+ for suggestion in self.rsb.build_suggestions(rql_query)]
+
+
+class DefaultMatcher(AbstractMatcher):
+ """Default matcher: delegate to standard's `rlcompleter.Completer`` class
+ """
+ def __init__(self, local_ctx):
+ self.completer = Completer(local_ctx)
+
+ def possible_matches(self, text):
+ if "." in text:
+ return self.completer.attr_matches(text)
+ else:
+ return self.completer.global_matches(text)
+
+
+class CWShellCompleter(object):
+ """Custom auto-completion helper for cubicweb-ctl shell.
+
+ ``CWShellCompleter`` provides a ``complete`` method suitable for
+ ``readline.set_completer``.
+
+ Attributes:
+ matchers: the list of ``AbstractMatcher`` instances that will suggest
+ possible completions
+
+ The completion process is the following:
+
+ - readline calls the ``complete`` method with user's input,
+ - the ``complete`` method asks for each known matchers if
+ it can suggest completions for user's input.
+ """
+
+ def __init__(self, local_ctx):
+ # list of matchers to ask for possible matches on completion
+ self.matchers = [DefaultMatcher(local_ctx)]
+ self.matchers.insert(0, RQLExecuteMatcher(local_ctx, local_ctx['session']))
+
+ def complete(self, text, state):
+ """readline's completer method
+
+ cf http://docs.python.org/2/library/readline.html#readline.set_completer
+ for more details.
+
+ Implementation inspired by `rlcompleter.Completer`
+ """
+ if state == 0:
+ # reset self.matches
+ self.matches = []
+ for matcher in self.matchers:
+ matches = matcher.possible_matches(text)
+ if matches:
+ self.matches = matches
+ break
+ else:
+ return None # no matcher able to handle `text`
+ try:
+ return self.matches[state]
+ except IndexError:
+ return None
--- a/view.py Tue Oct 07 10:06:24 2014 +0200
+++ b/view.py Fri Oct 17 18:16:58 2014 +0200
@@ -501,28 +501,6 @@
class ReloadableMixIn(object):
"""simple mixin for reloadable parts of UI"""
- def user_callback(self, cb, args, msg=None, nonify=False):
- """register the given user callback and return a URL to call it ready to be
- inserted in html
- """
- self._cw.add_js('cubicweb.ajax.js')
- if nonify:
- _cb = cb
- def cb(*args):
- _cb(*args)
- cbname = self._cw.register_onetime_callback(cb, *args)
- return self.build_js(cbname, xml_escape(msg or ''))
-
- def build_update_js_call(self, cbname, msg):
- rql = self.cw_rset.printable_rql()
- return "javascript: %s" % js.userCallbackThenUpdateUI(
- cbname, self.__regid__, rql, msg, self.__registry__, self.domid)
-
- def build_reload_js_call(self, cbname, msg):
- return "javascript: %s" % js.userCallbackThenReloadPage(cbname, msg)
-
- build_js = build_update_js_call # expect updatable component by default
-
@property
def domid(self):
return domid(self.__regid__)
--- a/web/application.py Tue Oct 07 10:06:24 2014 +0200
+++ b/web/application.py Fri Oct 17 18:16:58 2014 +0200
@@ -23,6 +23,7 @@
from time import clock, time
from contextlib import contextmanager
from warnings import warn
+import json
import httplib
@@ -589,8 +590,10 @@
status = httplib.INTERNAL_SERVER_ERROR
if isinstance(ex, PublishException) and ex.status is not None:
status = ex.status
- req.status_out = status
- json_dumper = getattr(ex, 'dumps', lambda : unicode(ex))
+ if req.status_out < 400:
+ # don't overwrite it if it's already set
+ req.status_out = status
+ json_dumper = getattr(ex, 'dumps', lambda : json.dumps({'reason': unicode(ex)}))
return json_dumper()
# special case handling
--- a/web/data/cubicweb.ajax.js Tue Oct 07 10:06:24 2014 +0200
+++ b/web/data/cubicweb.ajax.js Fri Oct 17 18:16:58 2014 +0200
@@ -88,8 +88,8 @@
});
var AJAX_PREFIX_URL = 'ajax';
-var JSON_BASE_URL = baseuri() + 'json?';
-var AJAX_BASE_URL = baseuri() + AJAX_PREFIX_URL + '?';
+var JSON_BASE_URL = BASE_URL + 'json?';
+var AJAX_BASE_URL = BASE_URL + AJAX_PREFIX_URL + '?';
jQuery.extend(cw.ajax, {
@@ -122,9 +122,7 @@
* (e.g. http://..../data??resource1.js,resource2.js)
*/
_modconcatLikeUrl: function(url) {
- var base = baseuri();
- if (!base.endswith('/')) { base += '/'; }
- var modconcat_rgx = new RegExp('(' + base + 'data/([a-z0-9]+/)?)\\?\\?(.+)');
+ var modconcat_rgx = new RegExp('(' + BASE_URL + 'data/([a-z0-9]+/)?)\\?\\?(.+)');
return modconcat_rgx.exec(url);
},
@@ -379,8 +377,8 @@
* dictionary, `reqtype` the HTTP request type (get 'GET' or 'POST').
*/
function loadRemote(url, form, reqtype, sync) {
- if (!url.toLowerCase().startswith(baseuri().toLowerCase())) {
- url = baseuri() + url;
+ if (!url.toLowerCase().startswith(BASE_URL.toLowerCase())) {
+ url = BASE_URL + url;
}
if (!sync) {
var deferred = new Deferred();
@@ -601,7 +599,7 @@
var fck = new FCKeditor(this.id);
fck.Config['CustomConfigurationsPath'] = fckconfigpath;
fck.Config['DefaultLanguage'] = fcklang;
- fck.BasePath = baseuri() + "fckeditor/";
+ fck.BasePath = BASE_URL + "fckeditor/";
fck.ReplaceTextarea();
} else {
cw.log('fckeditor could not be found.');
--- a/web/data/cubicweb.edition.js Tue Oct 07 10:06:24 2014 +0200
+++ b/web/data/cubicweb.edition.js Fri Oct 17 18:16:58 2014 +0200
@@ -67,7 +67,7 @@
rql: rql_for_eid(eid),
'__notemplate': 1
};
- var d = jQuery('#unrelatedDivs_' + eid).loadxhtml(baseuri() + 'view', args, 'post', 'append');
+ var d = jQuery('#unrelatedDivs_' + eid).loadxhtml(BASE_URL + 'view', args, 'post', 'append');
d.addCallback(function() {
_showMatchingSelect(eid, jQuery('#' + divId));
});
--- a/web/data/cubicweb.facets.js Tue Oct 07 10:06:24 2014 +0200
+++ b/web/data/cubicweb.facets.js Fri Oct 17 18:16:58 2014 +0200
@@ -69,7 +69,7 @@
}
var $focusLink = $('#focusLink');
if ($focusLink.length) {
- var url = baseuri()+ 'view?rql=' + encodeURIComponent(rql);
+ var url = BASE_URL + 'view?rql=' + encodeURIComponent(rql);
if (vid) {
url += '&vid=' + encodeURIComponent(vid);
}
--- a/web/data/cubicweb.htmlhelpers.js Tue Oct 07 10:06:24 2014 +0200
+++ b/web/data/cubicweb.htmlhelpers.js Fri Oct 17 18:16:58 2014 +0200
@@ -12,20 +12,13 @@
/**
* .. function:: baseuri()
*
- * returns the document's baseURI. (baseuri() uses document.baseURI if
- * available and inspects the <base> tag manually otherwise.)
+ * returns the document's baseURI.
*/
-function baseuri() {
- if (typeof BASE_URL === 'undefined') {
- // backward compatibility, BASE_URL might be undefined
- var uri = document.baseURI;
- if (uri) { // some browsers don't define baseURI
- return uri.toLowerCase();
- }
- return jQuery('base').attr('href').toLowerCase();
- }
- return BASE_URL;
-}
+baseuri = cw.utils.deprecatedFunction(
+ "[3.20] baseuri() is deprecated, use BASE_URL instead",
+ function () {
+ return BASE_URL;
+ });
/**
* .. function:: setProgressCursor()
@@ -107,18 +100,6 @@
}
/**
- * .. function:: popupLoginBox()
- *
- * toggles visibility of login popup div
- */
-// XXX used exactly ONCE in basecomponents
-popupLoginBox = cw.utils.deprecatedFunction(
- function() {
- $('#popupLoginBox').toggleClass('hidden');
- jQuery('#__login:visible').focus();
-});
-
-/**
* .. function getElementsMatching(tagName, properties, \/* optional \*\/ parent)
*
* returns the list of elements in the document matching the tag name
--- a/web/data/cubicweb.js Tue Oct 07 10:06:24 2014 +0200
+++ b/web/data/cubicweb.js Fri Oct 17 18:16:58 2014 +0200
@@ -208,91 +208,40 @@
},
/**
- * .. function:: formContents(elem \/* = document.body *\/)
+ * .. function:: formContents(elem)
*
- * this implementation comes from MochiKit
+ * cannot use jQuery.serializeArray() directly because of FCKeditor
*/
- formContents: function (elem /* = document.body */ ) {
- var names = [];
- var values = [];
- if (typeof(elem) == "undefined" || elem === null) {
- elem = document.body;
- } else {
- elem = cw.getNode(elem);
- }
- cw.utils.nodeWalkDepthFirst(elem, function (elem) {
- var name = elem.name;
- if (name && name.length) {
- if (elem.disabled) {
- return null;
- }
- var tagName = elem.tagName.toUpperCase();
- if (tagName === "INPUT" && (elem.type == "radio" || elem.type == "checkbox") && !elem.checked) {
- return null;
- }
- if (tagName === "SELECT") {
- if (elem.type == "select-one") {
- if (elem.selectedIndex >= 0) {
- var opt = elem.options[elem.selectedIndex];
- var v = opt.value;
- if (!v) {
- var h = opt.outerHTML;
- // internet explorer sure does suck.
- if (h && !h.match(/^[^>]+\svalue\s*=/i)) {
- v = opt.text;
- }
- }
- names.push(name);
- values.push(v);
+ formContents: function (elem) {
+ var $elem, array, names, values;
+ $elem = cw.jqNode(elem);
+ array = $elem.serializeArray();
+
+ if (typeof FCKeditor !== 'undefined') {
+ $elem.find('textarea').each(function (idx, textarea) {
+ var fck = FCKeditorAPI.GetInstance(textarea.id);
+ if (fck) {
+ array = jQuery.map(array, function (dict) {
+ if (dict.name === textarea.name) {
+ // filter out the textarea's - likely empty - value ...
return null;
}
- // no form elements?
- names.push(name);
- values.push("");
- return null;
- } else {
- var opts = elem.options;
- if (!opts.length) {
- names.push(name);
- values.push("");
- return null;
- }
- for (var i = 0; i < opts.length; i++) {
- var opt = opts[i];
- if (!opt.selected) {
- continue;
- }
- var v = opt.value;
- if (!v) {
- var h = opt.outerHTML;
- // internet explorer sure does suck.
- if (h && !h.match(/^[^>]+\svalue\s*=/i)) {
- v = opt.text;
- }
- }
- names.push(name);
- values.push(v);
- }
- return null;
- }
+ return dict;
+ });
+ // ... so we can put the HTML coming from FCKeditor instead.
+ array.push({
+ name: textarea.name,
+ value: fck.GetHTML()
+ });
}
- if (tagName === "FORM" || tagName === "P" || tagName === "SPAN" || tagName === "DIV") {
- return elem.childNodes;
- }
- var value = elem.value;
- if (tagName === "TEXTAREA") {
- if (typeof(FCKeditor) != 'undefined') {
- var fck = FCKeditorAPI.GetInstance(elem.id);
- if (fck) {
- value = fck.GetHTML();
- }
- }
- }
- names.push(name);
- values.push(value || '');
- return null;
- }
- return elem.childNodes;
+ });
+ }
+
+ names = [];
+ values = [];
+ jQuery.each(array, function (idx, dict) {
+ names.push(dict.name);
+ values.push(dict.value);
});
return [names, values];
},
--- a/web/data/cubicweb.timeline-bundle.js Tue Oct 07 10:06:24 2014 +0200
+++ b/web/data/cubicweb.timeline-bundle.js Fri Oct 17 18:16:58 2014 +0200
@@ -3,8 +3,8 @@
* :organization: Logilab
*/
-var SimileAjax_urlPrefix = baseuri() + 'data/';
-var Timeline_urlPrefix = baseuri() + 'data/';
+var SimileAjax_urlPrefix = BASE_URL + 'data/';
+var Timeline_urlPrefix = BASE_URL + 'data/';
/*
* Simile Ajax API
--- a/web/formfields.py Tue Oct 07 10:06:24 2014 +0200
+++ b/web/formfields.py Fri Oct 17 18:16:58 2014 +0200
@@ -529,6 +529,7 @@
"""
widget = fw.TextArea
size = 45
+ placeholder = None
def __init__(self, name=None, max_length=None, **kwargs):
self.max_length = max_length # must be set before super call
@@ -547,6 +548,9 @@
elif isinstance(self.widget, fw.TextInput):
self.init_text_input(self.widget)
+ if self.placeholder:
+ self.widget.attrs.setdefault('placeholder', self.placeholder)
+
def init_text_input(self, widget):
if self.max_length:
widget.attrs.setdefault('size', min(self.size, self.max_length))
@@ -557,6 +561,11 @@
widget.attrs.setdefault('cols', 60)
widget.attrs.setdefault('rows', 5)
+ def set_placeholder(self, placeholder):
+ self.placeholder = placeholder
+ if self.widget and self.placeholder:
+ self.widget.attrs.setdefault('placeholder', self.placeholder)
+
class PasswordField(StringField):
"""Use this field to edit password (`Password` yams type, encoded python
--- a/web/formwidgets.py Tue Oct 07 10:06:24 2014 +0200
+++ b/web/formwidgets.py Fri Oct 17 18:16:58 2014 +0200
@@ -210,6 +210,8 @@
attrs['id'] = field.dom_id(form, self.suffix)
if self.settabindex and not 'tabindex' in attrs:
attrs['tabindex'] = form._cw.next_tabindex()
+ if 'placeholder' in attrs:
+ attrs['placeholder'] = form._cw._(attrs['placeholder'])
return attrs
def values(self, form, field):
--- a/web/http_headers.py Tue Oct 07 10:06:24 2014 +0200
+++ b/web/http_headers.py Fri Oct 17 18:16:58 2014 +0200
@@ -1339,6 +1339,9 @@
h = self._headers.get(name, None)
r = self.handler.generate(name, h)
if r is not None:
+ assert isinstance(r, list)
+ for v in r:
+ assert isinstance(v, str)
self._raw_headers[name] = r
return r
@@ -1377,6 +1380,9 @@
Value should be a list of strings, each being one header of the
given name.
"""
+ assert isinstance(value, list)
+ for v in value:
+ assert isinstance(v, str)
name = name.lower()
self._raw_headers[name] = value
self._headers[name] = _RecalcNeeded
--- a/web/request.py Tue Oct 07 10:06:24 2014 +0200
+++ b/web/request.py Fri Oct 17 18:16:58 2014 +0200
@@ -179,7 +179,7 @@
self.ajax_request = value
json_request = property(_get_json_request, _set_json_request)
- def base_url(self, secure=None):
+ def _base_url(self, secure=None):
"""return the root url of the instance
secure = False -> base-url
@@ -192,7 +192,7 @@
if secure:
base_url = self.vreg.config.get('https-url')
if base_url is None:
- base_url = super(_CubicWebRequestBase, self).base_url()
+ base_url = super(_CubicWebRequestBase, self)._base_url()
return base_url
@property
@@ -786,10 +786,6 @@
if 'Expires' not in self.headers_out:
# Expires header seems to be required by IE7 -- Are you sure ?
self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT')
- if self.http_method() == 'HEAD':
- self.status_out = 200
- # XXX replace by True once validate_cache bw compat method is dropped
- return 200
# /!\ no raise, the function returns and we keep processing the request
else:
# overwrite headers_out to forge a brand new not-modified response
--- a/web/test/unittest_http.py Tue Oct 07 10:06:24 2014 +0200
+++ b/web/test/unittest_http.py Fri Oct 17 18:16:58 2014 +0200
@@ -227,7 +227,7 @@
hout = [('etag', 'rhino/really-not-babar'),
]
req = _test_cache(hin, hout, method='HEAD')
- self.assertCache(200, req.status_out, 'modifier HEAD verb')
+ self.assertCache(None, req.status_out, 'modifier HEAD verb')
# not modified
hin = [('if-none-match', 'babar'),
]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_views_forms.py Fri Oct 17 18:16:58 2014 +0200
@@ -0,0 +1,36 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+from cubicweb.devtools.testlib import CubicWebTC
+
+class InlinedFormTC(CubicWebTC):
+
+ def test_linked_to(self):
+ req = self.request()
+ formview = req.vreg['views'].select(
+ 'inline-creation', req,
+ etype='File', rtype='described_by_test', role='subject',
+ peid=123,
+ petype='Salesterm')
+ self.assertEqual({('described_by_test', 'object'): [123]},
+ formview.form.linked_to)
+
+if __name__ == '__main__':
+ from logilab.common.testlib import unittest_main
+ unittest_main()
+
--- a/web/test/unittest_web.py Tue Oct 07 10:06:24 2014 +0200
+++ b/web/test/unittest_web.py Fri Oct 17 18:16:58 2014 +0200
@@ -94,6 +94,7 @@
self.assertEqual(webreq.status_code, 200)
self.assertDictEqual(expect, loads(webreq.content))
+
class LanguageTC(CubicWebServerTC):
def test_language_neg(self):
@@ -104,6 +105,19 @@
webreq = self.web_request(headers=headers)
self.assertIn('lang="en"', webreq.read())
+ def test_response_codes(self):
+ with self.admin_access.client_cnx() as cnx:
+ admin_eid = cnx.user.eid
+ # guest can't see admin
+ webreq = self.web_request('/%d' % admin_eid)
+ self.assertEqual(webreq.status, 403)
+
+ # but admin can
+ self.web_login()
+ webreq = self.web_request('/%d' % admin_eid)
+ self.assertEqual(webreq.status, 200)
+
+
class LogQueriesTC(CubicWebServerTC):
@classmethod
def init_config(cls, config):
--- a/web/views/ajaxedit.py Tue Oct 07 10:06:24 2014 +0200
+++ b/web/views/ajaxedit.py Fri Oct 17 18:16:58 2014 +0200
@@ -36,8 +36,6 @@
cw_property_defs = {} # don't want to inherit this from Box
expected_kwargs = form_params = ('rtype', 'target')
- build_js = component.EditRelationMixIn.build_reload_js_call
-
def cell_call(self, row, col, rtype=None, target=None, etype=None):
self.rtype = rtype or self._cw.form['rtype']
self.target = target or self._cw.form['target']
--- a/web/views/forms.py Tue Oct 07 10:06:24 2014 +0200
+++ b/web/views/forms.py Fri Oct 17 18:16:58 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -41,6 +41,7 @@
but you'll use this one rarely.
"""
+
__docformat__ = "restructuredtext en"
from warnings import warn
@@ -50,7 +51,7 @@
from logilab.common.textutils import splitstrip
from logilab.common.deprecation import deprecated
-from cubicweb import ValidationError
+from cubicweb import ValidationError, neg_role
from cubicweb.utils import support_args
from cubicweb.predicates import non_final_entity, match_kwargs, one_line_rset
from cubicweb.web import RequestError, ProcessFormError
@@ -399,12 +400,21 @@
@property
@cached
def linked_to(self):
- # if current form is not the main form, exit immediately
+ linked_to = {}
+ # case where this is an embedded creation form
+ try:
+ eid = int(self.cw_extra_kwargs['peid'])
+ except KeyError:
+ pass
+ else:
+ ltrtype = self.cw_extra_kwargs['rtype']
+ ltrole = neg_role(self.cw_extra_kwargs['role'])
+ linked_to[(ltrtype, ltrole)] = [eid]
+ # now consider __linkto if the current form is the main form
try:
self.field_by_name('__maineid')
except form.FieldNotFound:
- return {}
- linked_to = {}
+ return linked_to
for linkto in self._cw.list_form_param('__linkto'):
ltrtype, eid, ltrole = linkto.split(':')
linked_to.setdefault((ltrtype, ltrole), []).append(int(eid))
--- a/wsgi/request.py Tue Oct 07 10:06:24 2014 +0200
+++ b/wsgi/request.py Fri Oct 17 18:16:58 2014 +0200
@@ -70,7 +70,7 @@
if k.startswith('HTTP_'))
if 'CONTENT_TYPE' in environ:
headers_in['Content-Type'] = environ['CONTENT_TYPE']
- https = environ["wsgi.url_scheme"] == 'https'
+ https = self.is_secure()
if self.path.startswith('/https/'):
self.path = self.path[6:]
self.environ['PATH_INFO'] = self.path
@@ -118,32 +118,8 @@
## wsgi request helpers ###################################################
- def instance_uri(self):
- """Return the instance's base URI (no PATH_INFO or QUERY_STRING)
-
- see python2.5's wsgiref.util.instance_uri code
- """
- environ = self.environ
- url = environ['wsgi.url_scheme'] + '://'
- if environ.get('HTTP_HOST'):
- url += environ['HTTP_HOST']
- else:
- url += environ['SERVER_NAME']
- if environ['wsgi.url_scheme'] == 'https':
- if environ['SERVER_PORT'] != '443':
- url += ':' + environ['SERVER_PORT']
- else:
- if environ['SERVER_PORT'] != '80':
- url += ':' + environ['SERVER_PORT']
- url += quote(environ.get('SCRIPT_NAME') or '/')
- return url
-
- def get_full_path(self):
- return '%s%s' % (self.path, self.environ.get('QUERY_STRING', '') and ('?' + self.environ.get('QUERY_STRING', '')) or '')
-
def is_secure(self):
- return 'wsgi.url_scheme' in self.environ \
- and self.environ['wsgi.url_scheme'] == 'https'
+ return self.environ['wsgi.url_scheme'] == 'https'
def get_posted_data(self):
# The WSGI spec says 'QUERY_STRING' may be absent.